From 9a17f4eb0d817535610aecd8008ac1a9ae16cd25 Mon Sep 17 00:00:00 2001 From: Matt Rothenberg Date: Wed, 21 Jul 2021 13:50:59 -0400 Subject: [PATCH] feat: add config option for Axios configuration --- action.yml | 3 + dist/LICENSE | 16 +- dist/index.js | 107973 +++++++++++++++++++++------------------- dist/index.js.map | 2 +- dist/post/index.js | 215 +- src/backends/http.ts | 49 +- src/config.ts | 2 + src/main.ts | 18 +- 8 files changed, 55761 insertions(+), 52517 deletions(-) diff --git a/action.yml b/action.yml index e5a8482..b93b311 100644 --- a/action.yml +++ b/action.yml @@ -2,6 +2,9 @@ name: 'Flat Data' description: 'The GitHub action which powers data fetching for Flat' author: 'GitHub OCTO' inputs: + axios_config: + description: 'A path (relative to the root of your repo) of a JSON file containing a subset of Axios configuration' + required: false downloaded_filename: description: 'The filename to use for writing data.' required: false diff --git a/dist/LICENSE b/dist/LICENSE index 9915c7f..f8e0839 100644 --- a/dist/LICENSE +++ b/dist/LICENSE @@ -15,15 +15,6 @@ MIT @actions/io MIT -The MIT License (MIT) - -Copyright 2019 GitHub - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. @azure/ms-rest-azure-env MIT @@ -3100,7 +3091,7 @@ pg MIT MIT License -Copyright (c) 2010 - 2021 Brian Carlson +Copyright (c) 2010 - 2020 Brian Carlson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -3191,7 +3182,7 @@ pg-protocol MIT MIT License -Copyright (c) 2010 - 2021 Brian Carlson +Copyright (c) 2010 - 2020 Brian Carlson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -4208,7 +4199,8 @@ MIT underscore MIT -Copyright (c) 2009-2021 Jeremy Ashkenas, Julian Gonggrijp, and DocumentCloud and Investigative Reporters & Editors +Copyright (c) 2009-2020 Jeremy Ashkenas, DocumentCloud and Investigative +Reporters & Editors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation diff --git a/dist/index.js b/dist/index.js index 61ea981..abd800d 100644 --- a/dist/index.js +++ b/dist/index.js @@ -37,17 +37,36 @@ async function fetchHTTP(config) { core.info('Fetching: HTTP'); // Authorization headers const auth = { - headers: { - authorization: config.authorization, - } + authorization: config.authorization, }; - const headers = config.authorization ? 
auth : {}; + const authHeader = config.authorization ? auth : {}; + let response; try { - const response = await axios_1.default.get(config.http_url, { - method: 'get', - responseType: 'stream', - ...headers, - }); + if (config.axios_config) { + const axiosConfig = fs_1.default.readFileSync(config.axios_config, { + encoding: 'utf8', + }); + const parsed = JSON.parse(axiosConfig); + const combinedWithOtherConfigValues = { + ...parsed, + url: config.http_url, + headers: { + ...parsed.headers, + ...authHeader, + }, + responseType: 'stream', + }; + response = await axios_1.default(combinedWithOtherConfigValues); + } + else { + response = await axios_1.default.get(config.http_url, { + method: 'get', + responseType: 'stream', + headers: { + ...authHeader, + }, + }); + } const filename = config.downloaded_filename; const writer = fs_1.default.createWriteStream(filename); response.data.pipe(writer); @@ -241,7 +260,7 @@ var __importStar = (this && this.__importStar) || function (mod) { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isSQLConfig = exports.isHTTPConfig = exports.getConfig = void 0; const core = __importStar(__nccwpck_require__(42186)); -const z = __importStar(__nccwpck_require__(63301)); +const z = __importStar(__nccwpck_require__(30489)); const FormatEnum = z.enum(['csv', 'json']); const CommonConfigSchema = z.object({ downloaded_filename: z.string(), @@ -249,6 +268,7 @@ const CommonConfigSchema = z.object({ }); const HTTPConfigSchema = z .object({ + axios_config: z.string().optional(), http_url: z.string(), authorization: z.string().optional(), mask: z.string().optional(), // string array of secrets or boolean @@ -265,6 +285,7 @@ const ConfigSchema = z.union([HTTPConfigSchema, SQLConfigSchema]); function getConfig() { const raw = {}; const keys = [ + 'axios_config', 'downloaded_filename', 'http_url', 'authorization', @@ -480,15 +501,16 @@ async function run() { // if including a mask config then we can strip out secrets from the http_url sourceMasked = source; // if no secrets to mask then this is just source if (config.mask) { - if (config.mask === 'true' || config.mask === 'false') { // mask param is a string + if (config.mask === 'true' || config.mask === 'false') { + // mask param is a string shouldMask = JSON.parse(config.mask); // convert to boolean } else { try { const maskArray = JSON.parse(config.mask); maskArray.forEach((secretToMask) => { - const regex = new RegExp(secretToMask, "g"); - sourceMasked = sourceMasked.replace(regex, "***"); + const regex = new RegExp(secretToMask, 'g'); + sourceMasked = sourceMasked.replace(regex, '***'); }); } catch (error) { @@ -513,7 +535,7 @@ async function run() { core.debug(`Invoking ${config.postprocess} with ${filename}...`); try { const raw = child_process_1.execSync(`NO_COLOR=true deno run -q --allow-read --allow-write --allow-run --allow-net --allow-env --unstable ${config.postprocess} ${filename}`).toString(); - core.info("Deno output:"); + core.info('Deno output:'); core.info(raw); } catch (error) { @@ -567,27 +589,14 @@ run().catch(error => { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issue = exports.issueCommand = void 0; const os = __importStar(__nccwpck_require__(12087)); const utils_1 = __nccwpck_require__(5278); /** @@ -666,25 +675,6 @@ function escapeProperty(s) { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -694,8 +684,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = __nccwpck_require__(87351); const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); @@ -762,9 +758,7 @@ function addPath(inputPath) { } exports.addPath = addPath; /** - * Gets the value of an input. - * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. - * Returns an empty string if the value is not defined. + * Gets the value of an input. The value is also trimmed. * * @param name name of the input to get * @param options optional. See InputOptions. 
@@ -775,34 +769,9 @@ function getInput(name, options) { if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } - if (options && options.trimWhitespace === false) { - return val; - } return val.trim(); } exports.getInput = getInput; -/** - * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. - * Support boolean input list: `true | True | TRUE | false | False | FALSE` . - * The return value is also in boolean type. - * ref: https://yaml.org/spec/1.2/spec.html#id2804923 - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns boolean - */ -function getBooleanInput(name, options) { - const trueValue = ['true', 'True', 'TRUE']; - const falseValue = ['false', 'False', 'FALSE']; - const val = getInput(name, options); - if (trueValue.includes(val)) - return true; - if (falseValue.includes(val)) - return false; - throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + - `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); -} -exports.getBooleanInput = getBooleanInput; /** * Sets the value of an output. * @@ -811,7 +780,6 @@ exports.getBooleanInput = getBooleanInput; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { - process.stdout.write(os.EOL); command_1.issueCommand('set-output', { name }, value); } exports.setOutput = setOutput; @@ -953,27 +921,14 @@ exports.getState = getState; "use strict"; // For internal use, subject to change. -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.issueCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__nccwpck_require__(35747)); @@ -1004,7 +959,6 @@ exports.issueCommand = issueCommand; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.toCommandValue = void 0; /** * Sanitizes an input into a string so it can be passed into issueCommand safely * @param input input to sanitize into a string @@ -1695,18 +1649,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); const assert_1 = __nccwpck_require__(42357); -const fs = __importStar(__nccwpck_require__(35747)); -const path = __importStar(__nccwpck_require__(85622)); +const fs = __nccwpck_require__(35747); +const path = __nccwpck_require__(85622); _a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; exports.IS_WINDOWS = process.platform === 'win32'; function exists(fsPath) { @@ -1904,18 +1851,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -const childProcess = __importStar(__nccwpck_require__(63129)); -const path = __importStar(__nccwpck_require__(85622)); +const childProcess = __nccwpck_require__(63129); +const path = __nccwpck_require__(85622); const util_1 = __nccwpck_require__(31669); -const ioUtil = __importStar(__nccwpck_require__(81962)); +const ioUtil = __nccwpck_require__(81962); const exec = util_1.promisify(childProcess.exec); /** * Copies a file or folder. @@ -2083,73 +2023,58 @@ function which(tool, check) { throw new Error(`Unable to locate executable file: ${tool}. 
Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); } } - return result; - } - const matches = yield findInPath(tool); - if (matches && matches.length > 0) { - return matches[0]; - } - return ''; - }); -} -exports.which = which; -/** - * Returns a list of all occurrences of the given tool on the system path. - * - * @returns Promise the paths of the tool - */ -function findInPath(tool) { - return __awaiter(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); } - // build the list of extensions to try - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { - for (const extension of process.env['PATHEXT'].split(path.delimiter)) { - if (extension) { - extensions.push(extension); + try { + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { + for (const extension of process.env.PATHEXT.split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } } } - } - // if it's rooted, return it if exists. otherwise return empty. - if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return [filePath]; + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return filePath; + } + return ''; } - return []; - } - // if any path separators, return empty - if (tool.includes(path.sep)) { - return []; - } - // build the list of directories - // - // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, - // it feels like we should not do this. Checking the current directory seems like more of a use - // case of a shell, and the which() function exposed by the toolkit should strive for consistency - // across platforms. - const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path.delimiter)) { - if (p) { - directories.push(p); + // if any path separators, return empty + if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { + return ''; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } } } - } - // find all matches - const matches = []; - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); - if (filePath) { - matches.push(filePath); + // return the first match + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); + if (filePath) { + return filePath; + } } + return ''; + } + catch (err) { + throw new Error(`which failed with message ${err.message}`); } - return matches; }); } -exports.findInPath = findInPath; +exports.which = which; function readCopyOptions(options) { const force = options.force == null ? 
true : options.force; const recursive = Boolean(options.recursive); @@ -2401,17 +2326,17 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } -var uuidv4 = _interopDefault(__nccwpck_require__(80824)); +var uuidv4 = _interopDefault(__nccwpck_require__(20621)); var axios = _interopDefault(__nccwpck_require__(96545)); var stream = __nccwpck_require__(92413); -var FormData = _interopDefault(__nccwpck_require__(69970)); -var tough = __nccwpck_require__(47372); +var FormData = _interopDefault(__nccwpck_require__(64334)); +var tough = __nccwpck_require__(68930); var tunnel = __nccwpck_require__(74294); var http = __nccwpck_require__(98605); var https = __nccwpck_require__(57211); var xml2js = __nccwpck_require__(66189); var os = __nccwpck_require__(12087); -var tslib = __nccwpck_require__(35390); +var tslib = __nccwpck_require__(75636); // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. @@ -6526,1868 +6451,2049 @@ exports.DomainCredentials = DomainCredentials; /***/ }), -/***/ 69970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var CombinedStream = __nccwpck_require__(85443); -var util = __nccwpck_require__(31669); -var path = __nccwpck_require__(85622); -var http = __nccwpck_require__(98605); -var https = __nccwpck_require__(57211); -var parseUrl = __nccwpck_require__(78835).parse; -var fs = __nccwpck_require__(35747); -var mime = __nccwpck_require__(43583); -var asynckit = __nccwpck_require__(14812); -var populate = __nccwpck_require__(53647); - -// Public API -module.exports = FormData; - -// make it a Stream -util.inherits(FormData, CombinedStream); +/***/ 68930: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/** - * Create readable "multipart/form-data" streams. - * Can be used to submit forms - * and file uploads to other web applications. +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. * - * @constructor - * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. */ -function FormData(options) { - if (!(this instanceof FormData)) { - return new FormData(); - } - - this._overheadLength = 0; - this._valueLength = 0; - this._valuesToMeasure = []; - CombinedStream.call(this); +var net = __nccwpck_require__(11631); +var urlParse = __nccwpck_require__(78835).parse; +var util = __nccwpck_require__(31669); +var pubsuffix = __nccwpck_require__(23005); +var Store = __nccwpck_require__(5340)/* .Store */ .y; +var MemoryCookieStore = __nccwpck_require__(96868)/* .MemoryCookieStore */ .m; +var pathMatch = __nccwpck_require__(55319)/* .pathMatch */ .U; +var VERSION = __nccwpck_require__(88263); - options = options || {}; - for (var option in options) { - this[option] = options[option]; - } +var punycode; +try { + punycode = __nccwpck_require__(94213); +} catch(e) { + console.warn("tough-cookie: can't load punycode; won't use punycode for domain normalization"); } -FormData.LINE_BREAK = '\r\n'; -FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; - -FormData.prototype.append = function(field, value, options) { - - options = options || {}; - - // allow filename as single option - if (typeof options == 'string') { - options = {filename: options}; - } +// From RFC6265 S4.1.1 +// note that it excludes \x3B ";" +var COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/; - var append = CombinedStream.prototype.append.bind(this); +var CONTROL_CHARS = /[\x00-\x1F]/; - // all that streamy business can't handle numbers - if (typeof value == 'number') { - value = '' + value; - } +// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in +// the "relaxed" mode, see: +// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60 +var TERMINATORS = ['\n', '\r', '\0']; - // https://github.com/felixge/node-form-data/issues/38 - if (util.isArray(value)) { - // Please convert your array into string - // the way web server expects it - this._error(new Error('Arrays are not supported.')); - return; - } +// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' +// Note ';' is \x3B +var PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; - var header = this._multiPartHeader(field, value, options); - var footer = this._multiPartFooter(); +// date-time parsing constants (RFC6265 S5.1.1) - append(header); - append(value); - append(footer); +var DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; - // pass along options.knownLength - this._trackLength(header, value, options); +var MONTH_TO_NUM = { + jan:0, feb:1, mar:2, apr:3, may:4, jun:5, + jul:6, aug:7, sep:8, oct:9, nov:10, dec:11 }; +var NUM_TO_MONTH = [ + 'Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec' +]; +var NUM_TO_DAY = [ + 'Sun','Mon','Tue','Wed','Thu','Fri','Sat' +]; -FormData.prototype._trackLength = function(header, value, options) { - var valueLength = 0; +var MAX_TIME = 2147483647000; // 31-bit max +var MIN_TIME = 0; // 31-bit min - // used w/ getLengthSync(), 
when length is known. - // e.g. for streaming directly from a remote server, - // w/ a known file a size, and not wanting to wait for - // incoming file to finish to get its size. - if (options.knownLength != null) { - valueLength += +options.knownLength; - } else if (Buffer.isBuffer(value)) { - valueLength = value.length; - } else if (typeof value === 'string') { - valueLength = Buffer.byteLength(value); +/* + * Parses a Natural number (i.e., non-negative integer) with either the + * *DIGIT ( non-digit *OCTET ) + * or + * *DIGIT + * grammar (RFC6265 S5.1.1). + * + * The "trailingOK" boolean controls if the grammar accepts a + * "( non-digit *OCTET )" trailer. + */ +function parseDigits(token, minDigits, maxDigits, trailingOK) { + var count = 0; + while (count < token.length) { + var c = token.charCodeAt(count); + // "non-digit = %x00-2F / %x3A-FF" + if (c <= 0x2F || c >= 0x3A) { + break; + } + count++; } - this._valueLength += valueLength; - - // @check why add CRLF? does this account for custom/multiple CRLFs? - this._overheadLength += - Buffer.byteLength(header) + - FormData.LINE_BREAK.length; - - // empty or either doesn't have path or not an http response - if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) { - return; + // constrain to a minimum and maximum number of digits. + if (count < minDigits || count > maxDigits) { + return null; } - // no need to bother with the length - if (!options.knownLength) { - this._valuesToMeasure.push(value); + if (!trailingOK && count != token.length) { + return null; } -}; -FormData.prototype._lengthRetriever = function(value, callback) { - - if (value.hasOwnProperty('fd')) { - - // take read range into a account - // `end` = Infinity –> read file till the end - // - // TODO: Looks like there is bug in Node fs.createReadStream - // it doesn't respect `end` options without `start` options - // Fix it when node fixes it. - // https://github.com/joyent/node/issues/7819 - if (value.end != undefined && value.end != Infinity && value.start != undefined) { - - // when end specified - // no need to calculate range - // inclusive, starts with 0 - callback(null, value.end + 1 - (value.start ? value.start : 0)); + return parseInt(token.substr(0,count), 10); +} - // not that fast snoopy - } else { - // still need to fetch file size from fs - fs.stat(value.path, function(err, stat) { +function parseTime(token) { + var parts = token.split(':'); + var result = [0,0,0]; - var fileSize; + /* RF6256 S5.1.1: + * time = hms-time ( non-digit *OCTET ) + * hms-time = time-field ":" time-field ":" time-field + * time-field = 1*2DIGIT + */ - if (err) { - callback(err); - return; - } + if (parts.length !== 3) { + return null; + } - // update final size based on the range options - fileSize = stat.size - (value.start ? 
value.start : 0); - callback(null, fileSize); - }); + for (var i = 0; i < 3; i++) { + // "time-field" must be strictly "1*2DIGIT", HOWEVER, "hms-time" can be + // followed by "( non-digit *OCTET )" so therefore the last time-field can + // have a trailer + var trailingOK = (i == 2); + var num = parseDigits(parts[i], 1, 2, trailingOK); + if (num === null) { + return null; } + result[i] = num; + } - // or http response - } else if (value.hasOwnProperty('httpVersion')) { - callback(null, +value.headers['content-length']); + return result; +} - // or request stream http://github.com/mikeal/request - } else if (value.hasOwnProperty('httpModule')) { - // wait till response come back - value.on('response', function(response) { - value.pause(); - callback(null, +response.headers['content-length']); - }); - value.resume(); +function parseMonth(token) { + token = String(token).substr(0,3).toLowerCase(); + var num = MONTH_TO_NUM[token]; + return num >= 0 ? num : null; +} - // something else - } else { - callback('Unknown stream'); +/* + * RFC6265 S5.1.1 date parser (see RFC for full grammar) + */ +function parseDate(str) { + if (!str) { + return; } -}; -FormData.prototype._multiPartHeader = function(field, value, options) { - // custom header specified (as string)? - // it becomes responsible for boundary - // (e.g. to handle extra CRLFs on .NET servers) - if (typeof options.header == 'string') { - return options.header; + /* RFC6265 S5.1.1: + * 2. Process each date-token sequentially in the order the date-tokens + * appear in the cookie-date + */ + var tokens = str.split(DATE_DELIM); + if (!tokens) { + return; } - var contentDisposition = this._getContentDisposition(value, options); - var contentType = this._getContentType(value, options); + var hour = null; + var minute = null; + var second = null; + var dayOfMonth = null; + var month = null; + var year = null; - var contents = ''; - var headers = { - // add custom disposition as third element or keep it two elements if not - 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), - // if no content type. allow it to be empty array - 'Content-Type': [].concat(contentType || []) - }; + for (var i=0; i= 70 && year <= 99) { + year += 1900; + } else if (year >= 0 && year <= 69) { + year += 2000; + } + } } } - return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; -}; + /* RFC 6265 S5.1.1 + * "5. Abort these steps and fail to parse the cookie-date if: + * * at least one of the found-day-of-month, found-month, found- + * year, or found-time flags is not set, + * * the day-of-month-value is less than 1 or greater than 31, + * * the year-value is less than 1601, + * * the hour-value is greater than 23, + * * the minute-value is greater than 59, or + * * the second-value is greater than 59. + * (Note that leap seconds cannot be represented in this syntax.)" + * + * So, in order as above: + */ + if ( + dayOfMonth === null || month === null || year === null || second === null || + dayOfMonth < 1 || dayOfMonth > 31 || + year < 1601 || + hour > 23 || + minute > 59 || + second > 59 + ) { + return; + } -FormData.prototype._getContentDisposition = function(value, options) { + return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second)); +} - var filename - , contentDisposition - ; +function formatDate(date) { + var d = date.getUTCDate(); d = d >= 10 ? d : '0'+d; + var h = date.getUTCHours(); h = h >= 10 ? h : '0'+h; + var m = date.getUTCMinutes(); m = m >= 10 ? 
m : '0'+m; + var s = date.getUTCSeconds(); s = s >= 10 ? s : '0'+s; + return NUM_TO_DAY[date.getUTCDay()] + ', ' + + d+' '+ NUM_TO_MONTH[date.getUTCMonth()] +' '+ date.getUTCFullYear() +' '+ + h+':'+m+':'+s+' GMT'; +} - if (typeof options.filepath === 'string') { - // custom filepath for relative paths - filename = path.normalize(options.filepath).replace(/\\/g, '/'); - } else if (options.filename || value.name || value.path) { - // custom filename take precedence - // formidable and the browser add a name property - // fs- and request- streams have path property - filename = path.basename(options.filename || value.name || value.path); - } else if (value.readable && value.hasOwnProperty('httpVersion')) { - // or try http response - filename = path.basename(value.client._httpMessage.path || ''); +// S5.1.2 Canonicalized Host Names +function canonicalDomain(str) { + if (str == null) { + return null; } + str = str.trim().replace(/^\./,''); // S4.1.2.3 & S5.2.3: ignore leading . - if (filename) { - contentDisposition = 'filename="' + filename + '"'; + // convert to IDN if any non-ASCII characters + if (punycode && /[^\u0001-\u007f]/.test(str)) { + str = punycode.toASCII(str); } - return contentDisposition; -}; - -FormData.prototype._getContentType = function(value, options) { - - // use custom content-type above all - var contentType = options.contentType; + return str.toLowerCase(); +} - // or try `name` from formidable, browser - if (!contentType && value.name) { - contentType = mime.lookup(value.name); +// S5.1.3 Domain Matching +function domainMatch(str, domStr, canonicalize) { + if (str == null || domStr == null) { + return null; } - - // or try `path` from fs-, request- streams - if (!contentType && value.path) { - contentType = mime.lookup(value.path); + if (canonicalize !== false) { + str = canonicalDomain(str); + domStr = canonicalDomain(domStr); } - // or if it's http-reponse - if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { - contentType = value.headers['content-type']; + /* + * "The domain string and the string are identical. (Note that both the + * domain string and the string will have been canonicalized to lower case at + * this point)" + */ + if (str == domStr) { + return true; } - // or guess it from the filepath or filename - if (!contentType && (options.filepath || options.filename)) { - contentType = mime.lookup(options.filepath || options.filename); - } + /* "All of the following [three] conditions hold:" (order adjusted from the RFC) */ - // fallback to the default content type if `value` is not simple value - if (!contentType && typeof value == 'object') { - contentType = FormData.DEFAULT_CONTENT_TYPE; + /* "* The string is a host name (i.e., not an IP address)." */ + if (net.isIP(str)) { + return false; } - return contentType; -}; + /* "* The domain string is a suffix of the string" */ + var idx = str.indexOf(domStr); + if (idx <= 0) { + return false; // it's a non-match (-1) or prefix (0) + } -FormData.prototype._multiPartFooter = function() { - return function(next) { - var footer = FormData.LINE_BREAK; + // e.g "a.b.c".indexOf("b.c") === 2 + // 5 === 3+2 + if (str.length !== domStr.length + idx) { // it's not a suffix + return false; + } - var lastPart = (this._streams.length === 0); - if (lastPart) { - footer += this._lastBoundary(); - } + /* "* The last character of the string that is not included in the domain + * string is a %x2E (".") character." 
*/ + if (str.substr(idx-1,1) !== '.') { + return false; + } - next(footer); - }.bind(this); -}; + return true; +} -FormData.prototype._lastBoundary = function() { - return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; -}; -FormData.prototype.getHeaders = function(userHeaders) { - var header; - var formHeaders = { - 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() - }; +// RFC6265 S5.1.4 Paths and Path-Match - for (header in userHeaders) { - if (userHeaders.hasOwnProperty(header)) { - formHeaders[header.toLowerCase()] = userHeaders[header]; - } +/* + * "The user agent MUST use an algorithm equivalent to the following algorithm + * to compute the default-path of a cookie:" + * + * Assumption: the path (and not query part or absolute uri) is passed in. + */ +function defaultPath(path) { + // "2. If the uri-path is empty or if the first character of the uri-path is not + // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. + if (!path || path.substr(0,1) !== "/") { + return "/"; } - return formHeaders; -}; + // "3. If the uri-path contains no more than one %x2F ("/") character, output + // %x2F ("/") and skip the remaining step." + if (path === "/") { + return path; + } -FormData.prototype.getBoundary = function() { - if (!this._boundary) { - this._generateBoundary(); + var rightSlash = path.lastIndexOf("/"); + if (rightSlash === 0) { + return "/"; } - return this._boundary; -}; + // "4. Output the characters of the uri-path from the first character up to, + // but not including, the right-most %x2F ("/")." + return path.slice(0, rightSlash); +} -FormData.prototype.getBuffer = function() { - var dataBuffer = new Buffer.alloc( 0 ); - var boundary = this.getBoundary(); +function trimTerminator(str) { + for (var t = 0; t < TERMINATORS.length; t++) { + var terminatorIdx = str.indexOf(TERMINATORS[t]); + if (terminatorIdx !== -1) { + str = str.substr(0,terminatorIdx); + } + } - // Create the form content. Add Line breaks to the end of data. - for (var i = 0, len = this._streams.length; i < len; i++) { - if (typeof this._streams[i] !== 'function') { + return str; +} - // Add content to the buffer. - if(Buffer.isBuffer(this._streams[i])) { - dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); - }else { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); - } +function parseCookiePair(cookiePair, looseMode) { + cookiePair = trimTerminator(cookiePair); - // Add break after content. - if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); - } + var firstEq = cookiePair.indexOf('='); + if (looseMode) { + if (firstEq === 0) { // '=' is immediately at start + cookiePair = cookiePair.substr(1); + firstEq = cookiePair.indexOf('='); // might still need to split on '=' + } + } else { // non-loose mode + if (firstEq <= 0) { // no '=' or is at start + return; // needs to have non-empty "cookie-name" } } - // Add the footer and return the Buffer object. - return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); -}; - -FormData.prototype._generateBoundary = function() { - // This generates a 50 character boundary similar to those used by Firefox. - // They are optimized for boyer-moore parsing. 
- var boundary = '--------------------------'; - for (var i = 0; i < 24; i++) { - boundary += Math.floor(Math.random() * 10).toString(16); + var cookieName, cookieValue; + if (firstEq <= 0) { + cookieName = ""; + cookieValue = cookiePair.trim(); + } else { + cookieName = cookiePair.substr(0, firstEq).trim(); + cookieValue = cookiePair.substr(firstEq+1).trim(); } - this._boundary = boundary; -}; + if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) { + return; + } -// Note: getLengthSync DOESN'T calculate streams length -// As workaround one can calculate file size manually -// and add it as knownLength option -FormData.prototype.getLengthSync = function() { - var knownLength = this._overheadLength + this._valueLength; + var c = new Cookie(); + c.key = cookieName; + c.value = cookieValue; + return c; +} - // Don't get confused, there are 3 "internal" streams for each keyval pair - // so it basically checks if there is any value added to the form - if (this._streams.length) { - knownLength += this._lastBoundary().length; +function parse(str, options) { + if (!options || typeof options !== 'object') { + options = {}; } + str = str.trim(); - // https://github.com/form-data/form-data/issues/40 - if (!this.hasKnownLength()) { - // Some async length retrievers are present - // therefore synchronous length calculation is false. - // Please use getLength(callback) to get proper length - this._error(new Error('Cannot calculate proper length in synchronous way.')); + // We use a regex to parse the "name-value-pair" part of S5.2 + var firstSemi = str.indexOf(';'); // S5.2 step 1 + var cookiePair = (firstSemi === -1) ? str : str.substr(0, firstSemi); + var c = parseCookiePair(cookiePair, !!options.loose); + if (!c) { + return; } - return knownLength; -}; + if (firstSemi === -1) { + return c; + } -// Public API to check if length of added values is known -// https://github.com/form-data/form-data/issues/196 -// https://github.com/form-data/form-data/issues/262 -FormData.prototype.hasKnownLength = function() { - var hasKnownLength = true; + // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question)." plus later on in the same section + // "discard the first ";" and trim". + var unparsed = str.slice(firstSemi + 1).trim(); - if (this._valuesToMeasure.length) { - hasKnownLength = false; + // "If the unparsed-attributes string is empty, skip the rest of these + // steps." + if (unparsed.length === 0) { + return c; } - return hasKnownLength; -}; - -FormData.prototype.getLength = function(cb) { - var knownLength = this._overheadLength + this._valueLength; + /* + * S5.2 says that when looping over the items "[p]rocess the attribute-name + * and attribute-value according to the requirements in the following + * subsections" for every item. Plus, for many of the individual attributes + * in S5.3 it says to use the "attribute-value of the last attribute in the + * cookie-attribute-list". Therefore, in this implementation, we overwrite + * the previous value. 
+ */ + var cookie_avs = unparsed.split(';'); + while (cookie_avs.length) { + var av = cookie_avs.shift().trim(); + if (av.length === 0) { // happens if ";;" appears + continue; + } + var av_sep = av.indexOf('='); + var av_key, av_value; - if (this._streams.length) { - knownLength += this._lastBoundary().length; - } + if (av_sep === -1) { + av_key = av; + av_value = null; + } else { + av_key = av.substr(0,av_sep); + av_value = av.substr(av_sep+1); + } - if (!this._valuesToMeasure.length) { - process.nextTick(cb.bind(this, null, knownLength)); - return; - } + av_key = av_key.trim().toLowerCase(); - asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { - if (err) { - cb(err); - return; + if (av_value) { + av_value = av_value.trim(); } - values.forEach(function(length) { - knownLength += length; - }); + switch(av_key) { + case 'expires': // S5.2.1 + if (av_value) { + var exp = parseDate(av_value); + // "If the attribute-value failed to parse as a cookie date, ignore the + // cookie-av." + if (exp) { + // over and underflow not realistically a concern: V8's getTime() seems to + // store something larger than a 32-bit time_t (even with 32-bit node) + c.expires = exp; + } + } + break; - cb(null, knownLength); - }); -}; + case 'max-age': // S5.2.2 + if (av_value) { + // "If the first character of the attribute-value is not a DIGIT or a "-" + // character ...[or]... If the remainder of attribute-value contains a + // non-DIGIT character, ignore the cookie-av." + if (/^-?[0-9]+$/.test(av_value)) { + var delta = parseInt(av_value, 10); + // "If delta-seconds is less than or equal to zero (0), let expiry-time + // be the earliest representable date and time." + c.setMaxAge(delta); + } + } + break; -FormData.prototype.submit = function(params, cb) { - var request - , options - , defaults = {method: 'post'} - ; + case 'domain': // S5.2.3 + // "If the attribute-value is empty, the behavior is undefined. However, + // the user agent SHOULD ignore the cookie-av entirely." + if (av_value) { + // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E + // (".") character." + var domain = av_value.trim().replace(/^\./, ''); + if (domain) { + // "Convert the cookie-domain to lower case." + c.domain = domain.toLowerCase(); + } + } + break; - // parse provided url if it's string - // or treat it as options object - if (typeof params == 'string') { + case 'path': // S5.2.4 + /* + * "If the attribute-value is empty or if the first character of the + * attribute-value is not %x2F ("/"): + * Let cookie-path be the default-path. + * Otherwise: + * Let cookie-path be the attribute-value." + * + * We'll represent the default-path as null since it depends on the + * context of the parsing. + */ + c.path = av_value && av_value[0] === "/" ? av_value : null; + break; - params = parseUrl(params); - options = populate({ - port: params.port, - path: params.pathname, - host: params.hostname, - protocol: params.protocol - }, defaults); + case 'secure': // S5.2.5 + /* + * "If the attribute-name case-insensitively matches the string "Secure", + * the user agent MUST append an attribute to the cookie-attribute-list + * with an attribute-name of Secure and an empty attribute-value." 
+ */ + c.secure = true; + break; - // use custom params - } else { + case 'httponly': // S5.2.6 -- effectively the same as 'secure' + c.httpOnly = true; + break; - options = populate(params, defaults); - // if no port provided use default one - if (!options.port) { - options.port = options.protocol == 'https:' ? 443 : 80; + default: + c.extensions = c.extensions || []; + c.extensions.push(av); + break; } } - // put that good code in getHeaders to some use - options.headers = this.getHeaders(params.headers); + return c; +} - // https if specified, fallback to http in any other case - if (options.protocol == 'https:') { - request = https.request(options); - } else { - request = http.request(options); +// avoid the V8 deoptimization monster! +function jsonParse(str) { + var obj; + try { + obj = JSON.parse(str); + } catch (e) { + return e; } + return obj; +} - // get content length and fire away - this.getLength(function(err, length) { - if (err) { - this._error(err); - return; - } - - // add content length - request.setHeader('Content-Length', length); +function fromJSON(str) { + if (!str) { + return null; + } - this.pipe(request); - if (cb) { - request.on('error', cb); - request.on('response', cb.bind(this, null)); + var obj; + if (typeof str === 'string') { + obj = jsonParse(str); + if (obj instanceof Error) { + return null; } - }.bind(this)); - - return request; -}; - -FormData.prototype._error = function(err) { - if (!this.error) { - this.error = err; - this.pause(); - this.emit('error', err); + } else { + // assume it's an Object + obj = str; } -}; - -FormData.prototype.toString = function () { - return '[object FormData]'; -}; + var c = new Cookie(); + for (var i=0; i { + return c; +} -// populates missing values -module.exports = function(dst, src) { +/* Section 5.4 part 2: + * "* Cookies with longer paths are listed before cookies with + * shorter paths. + * + * * Among cookies that have equal-length path fields, cookies with + * earlier creation-times are listed before cookies with later + * creation-times." + */ - Object.keys(src).forEach(function(prop) - { - dst[prop] = dst[prop] || src[prop]; - }); +function cookieCompare(a,b) { + var cmp = 0; - return dst; -}; + // descending for length: b CMP a + var aPathLen = a.path ? a.path.length : 0; + var bPathLen = b.path ? b.path.length : 0; + cmp = bPathLen - aPathLen; + if (cmp !== 0) { + return cmp; + } + // ascending for time: a CMP b + var aTime = a.creation ? a.creation.getTime() : MAX_TIME; + var bTime = b.creation ? 
b.creation.getTime() : MAX_TIME; + cmp = aTime - bTime; + if (cmp !== 0) { + return cmp; + } -/***/ }), + // break ties for the same millisecond (precision of JavaScript's clock) + cmp = a.creationIndex - b.creationIndex; -/***/ 35390: -/***/ ((__unused_webpack_module, __webpack_exports__, __nccwpck_require__) => { - -"use strict"; -__nccwpck_require__.r(__webpack_exports__); -/* harmony export */ __nccwpck_require__.d(__webpack_exports__, { -/* harmony export */ "__extends": () => /* binding */ __extends, -/* harmony export */ "__assign": () => /* binding */ __assign, -/* harmony export */ "__rest": () => /* binding */ __rest, -/* harmony export */ "__decorate": () => /* binding */ __decorate, -/* harmony export */ "__param": () => /* binding */ __param, -/* harmony export */ "__metadata": () => /* binding */ __metadata, -/* harmony export */ "__awaiter": () => /* binding */ __awaiter, -/* harmony export */ "__generator": () => /* binding */ __generator, -/* harmony export */ "__createBinding": () => /* binding */ __createBinding, -/* harmony export */ "__exportStar": () => /* binding */ __exportStar, -/* harmony export */ "__values": () => /* binding */ __values, -/* harmony export */ "__read": () => /* binding */ __read, -/* harmony export */ "__spread": () => /* binding */ __spread, -/* harmony export */ "__spreadArrays": () => /* binding */ __spreadArrays, -/* harmony export */ "__await": () => /* binding */ __await, -/* harmony export */ "__asyncGenerator": () => /* binding */ __asyncGenerator, -/* harmony export */ "__asyncDelegator": () => /* binding */ __asyncDelegator, -/* harmony export */ "__asyncValues": () => /* binding */ __asyncValues, -/* harmony export */ "__makeTemplateObject": () => /* binding */ __makeTemplateObject, -/* harmony export */ "__importStar": () => /* binding */ __importStar, -/* harmony export */ "__importDefault": () => /* binding */ __importDefault, -/* harmony export */ "__classPrivateFieldGet": () => /* binding */ __classPrivateFieldGet, -/* harmony export */ "__classPrivateFieldSet": () => /* binding */ __classPrivateFieldSet -/* harmony export */ }); -/*! ***************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global Reflect, Promise */ - -var extendStatics = function(d, b) { - extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; - return extendStatics(d, b); -}; - -function __extends(d, b) { - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); -} - -var __assign = function() { - __assign = Object.assign || function __assign(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - } - return __assign.apply(this, arguments); -} - -function __rest(s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; -} - -function __decorate(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; -} - -function __param(paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } -} - -function __metadata(metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); -} - -function __awaiter(thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -} - -function __generator(thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -} - -function __createBinding(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -} - -function __exportStar(m, exports) { - for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) exports[p] = m[p]; -} - -function __values(o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); -} - -function __read(o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; -} - -function __spread() { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; -} - -function __spreadArrays() { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; -}; - -function __await(v) { - return this instanceof __await ? (this.v = v, this) : new __await(v); -} - -function __asyncGenerator(thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } -} - -function __asyncDelegator(o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } -} - -function __asyncValues(o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -} - -function __makeTemplateObject(cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; -}; - -function __importStar(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result.default = mod; - return result; -} - -function __importDefault(mod) { - return (mod && mod.__esModule) ? mod : { default: mod }; -} - -function __classPrivateFieldGet(receiver, privateMap) { - if (!privateMap.has(receiver)) { - throw new TypeError("attempted to get private field on non-instance"); - } - return privateMap.get(receiver); -} - -function __classPrivateFieldSet(receiver, privateMap, value) { - if (!privateMap.has(receiver)) { - throw new TypeError("attempted to set private field on non-instance"); - } - privateMap.set(receiver, value); - return value; -} + return cmp; +} +// Gives the permutation of all possible pathMatch()es of a given path. The +// array is in longest-to-shortest order. Handy for indexing. +function permutePath(path) { + if (path === '/') { + return ['/']; + } + if (path.lastIndexOf('/') === path.length-1) { + path = path.substr(0,path.length-1); + } + var permutations = [path]; + while (path.length > 1) { + var lindex = path.lastIndexOf('/'); + if (lindex === 0) { + break; + } + path = path.substr(0,lindex); + permutations.push(path); + } + permutations.push('/'); + return permutations; +} -/***/ }), +function getCookieContext(url) { + if (url instanceof Object) { + return url; + } + // NOTE: decodeURI will throw on malformed URIs (see GH-32). + // Therefore, we will just skip decoding for such URIs. + try { + url = decodeURI(url); + } + catch(err) { + // Silently swallow error + } -/***/ 2265: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + return urlParse(url); +} -"use strict"; +function Cookie(options) { + options = options || {}; -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. 
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const fs_1 = __nccwpck_require__(35747); -const crypto_1 = __nccwpck_require__(76417); -const applicationTokenCredentialsBase_1 = __nccwpck_require__(60968); -const authConstants_1 = __nccwpck_require__(26700); -class ApplicationTokenCertificateCredentials extends applicationTokenCredentialsBase_1.ApplicationTokenCredentialsBase { - /** - * Creates a new ApplicationTokenCredentials object. - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for detailed instructions on creating an Azure Active Directory application. - * @constructor - * @param {string} clientId The active directory application client id. - * @param {string} domain The domain or tenant id containing this application. - * @param {string} certificate A PEM encoded certificate private key. - * @param {string} thumbprint A hex encoded thumbprint of the certificate. - * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {Environment} [environment] The azure environment to authenticate with. - * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. - */ - constructor(clientId, domain, certificate, thumbprint, tokenAudience, environment, tokenCache) { - if (!certificate || typeof certificate.valueOf() !== "string") { - throw new Error("certificate must be a non empty string."); - } - if (!thumbprint || typeof thumbprint.valueOf() !== "string") { - throw new Error("thumbprint must be a non empty string."); - } - super(clientId, domain, tokenAudience, environment, tokenCache); - this.certificate = certificate; - this.thumbprint = thumbprint; - } - /** - * Tries to get the token from cache initially. If that is unsuccessfull then it tries to get the token from ADAL. - * @returns {Promise} A promise that resolves to TokenResponse and rejects with an Error. 
- */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - try { - const tokenResponse = yield this.getTokenFromCache(); - return tokenResponse; - } - catch (error) { - if (error.message.startsWith(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR)) { - return Promise.reject(error); - } - return new Promise((resolve, reject) => { - const resource = this.getActiveDirectoryResourceId(); - this.authContext.acquireTokenWithClientCertificate(resource, this.clientId, this.certificate, this.thumbprint, (error, tokenResponse) => { - if (error) { - return reject(error); - } - if (tokenResponse.error || tokenResponse.errorDescription) { - return reject(tokenResponse); - } - return resolve(tokenResponse); - }); - }); - } - }); - } - /** - * Creates a new instance of ApplicationTokenCertificateCredentials. - * - * @param clientId The active directory application client id also known as the SPN (ServicePrincipal Name). - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {string} certificateStringOrFilePath A PEM encoded certificate and private key OR an absolute filepath to the .pem file containing that information. For example: - * - CertificateString: "-----BEGIN PRIVATE KEY-----\n\n-----END PRIVATE KEY-----\n-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----\n" - * - CertificateFilePath: **Absolute** file path of the .pem file. - * @param domain The domain or tenant id containing this application. - * @param options AzureTokenCredentialsOptions - Object representing optional parameters. - * - * @returns ApplicationTokenCertificateCredentials - */ - static create(clientId, certificateStringOrFilePath, domain, options) { - if (!certificateStringOrFilePath || - typeof certificateStringOrFilePath.valueOf() !== "string") { - throw new Error("'certificateStringOrFilePath' must be a non empty string."); - } - if (!certificateStringOrFilePath.startsWith("-----BEGIN")) { - certificateStringOrFilePath = fs_1.readFileSync(certificateStringOrFilePath, "utf8"); - } - const certificatePattern = /(-+BEGIN CERTIFICATE-+)(\n\r?|\r\n?)([A-Za-z0-9\+\/\n\r]+\=*)(\n\r?|\r\n?)(-+END CERTIFICATE-+)/; - const matchCert = certificateStringOrFilePath.match(certificatePattern); - const rawCertificate = matchCert ? 
matchCert[3] : ""; - if (!rawCertificate) { - throw new Error("Unable to correctly parse the certificate from the value provided in 'certificateStringOrFilePath' "); - } - const thumbprint = crypto_1.createHash("sha1") - .update(Buffer.from(rawCertificate, "base64")) - .digest("hex"); - return new ApplicationTokenCertificateCredentials(clientId, domain, certificateStringOrFilePath, thumbprint, options.tokenAudience, options.environment, options.tokenCache); + Object.keys(options).forEach(function(prop) { + if (Cookie.prototype.hasOwnProperty(prop) && + Cookie.prototype[prop] !== options[prop] && + prop.substr(0,1) !== '_') + { + this[prop] = options[prop]; } + }, this); + + this.creation = this.creation || new Date(); + + // used to break creation ties in cookieCompare(): + Object.defineProperty(this, 'creationIndex', { + configurable: false, + enumerable: false, // important for assert.deepEqual checks + writable: true, + value: ++Cookie.cookiesCreated + }); } -exports.ApplicationTokenCertificateCredentials = ApplicationTokenCertificateCredentials; -//# sourceMappingURL=applicationTokenCertificateCredentials.js.map -/***/ }), +Cookie.cookiesCreated = 0; // incremented each time a cookie is created -/***/ 35216: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +Cookie.parse = parse; +Cookie.fromJSON = fromJSON; -"use strict"; +Cookie.prototype.key = ""; +Cookie.prototype.value = ""; -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); +// the order in which the RFC has them: +Cookie.prototype.expires = "Infinity"; // coerces to literal Infinity +Cookie.prototype.maxAge = null; // takes precedence over expires for TTL +Cookie.prototype.domain = null; +Cookie.prototype.path = null; +Cookie.prototype.secure = false; +Cookie.prototype.httpOnly = false; +Cookie.prototype.extensions = null; + +// set by the CookieJar: +Cookie.prototype.hostOnly = null; // boolean when set +Cookie.prototype.pathIsDefault = null; // boolean when set +Cookie.prototype.creation = null; // Date when set; defaulted by Cookie.parse +Cookie.prototype.lastAccessed = null; // Date when set +Object.defineProperty(Cookie.prototype, 'creationIndex', { + configurable: true, + enumerable: false, + writable: true, + value: 0 +}); + +Cookie.serializableProperties = Object.keys(Cookie.prototype) + .filter(function(prop) { + return !( + Cookie.prototype[prop] instanceof Function || + prop === 'creationIndex' || + prop.substr(0,1) === '_' + ); + }); + +Cookie.prototype.inspect = function inspect() { + var now = Date.now(); + return 'Cookie="'+this.toString() + + '; hostOnly='+(this.hostOnly != null ? this.hostOnly : '?') + + '; aAge='+(this.lastAccessed ? (now-this.lastAccessed.getTime())+'ms' : '?') + + '; cAge='+(this.creation ? 
(now-this.creation.getTime())+'ms' : '?') + + '"'; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const applicationTokenCredentialsBase_1 = __nccwpck_require__(60968); -const authConstants_1 = __nccwpck_require__(26700); -class ApplicationTokenCredentials extends applicationTokenCredentialsBase_1.ApplicationTokenCredentialsBase { - /** - * Creates a new ApplicationTokenCredentials object. - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for detailed instructions on creating an Azure Active Directory application. - * @constructor - * @param {string} clientId The active directory application client id. - * @param {string} domain The domain or tenant id containing this application. - * @param {string} secret The authentication secret for the application. - * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {Environment} [environment] The azure environment to authenticate with. - * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. - */ - constructor(clientId, domain, secret, tokenAudience, environment, tokenCache) { - if (!secret || typeof secret.valueOf() !== "string") { - throw new Error("secret must be a non empty string."); - } - super(clientId, domain, tokenAudience, environment, tokenCache); - this.secret = secret; - } - /** - * Tries to get the token from cache initially. If that is unsuccessfull then it tries to get the token from ADAL. - * @returns {Promise} A promise that resolves to TokenResponse and rejects with an Error. - */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - try { - const tokenResponse = yield this.getTokenFromCache(); - return tokenResponse; - } - catch (error) { - if (error.message && - error.message.startsWith(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR)) { - return Promise.reject(error); - } - const resource = this.getActiveDirectoryResourceId(); - return new Promise((resolve, reject) => { - this.authContext.acquireTokenWithClientCredentials(resource, this.clientId, this.secret, (error, tokenResponse) => { - if (error) { - return reject(error); - } - if (tokenResponse.error || tokenResponse.errorDescription) { - return reject(tokenResponse); - } - return resolve(tokenResponse); - }); - }); - } - }); - } + +// Use the new custom inspection symbol to add the custom inspect function if +// available. +if (util.inspect.custom) { + Cookie.prototype[util.inspect.custom] = Cookie.prototype.inspect; } -exports.ApplicationTokenCredentials = ApplicationTokenCredentials; -//# sourceMappingURL=applicationTokenCredentials.js.map -/***/ }), +Cookie.prototype.toJSON = function() { + var obj = {}; -/***/ 60968: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + var props = Cookie.serializableProperties; + for (var i=0; i super.getTokenFromCache } - }); - return __awaiter(this, void 0, void 0, function* () { - const self = this; - // a thin wrapper over the base implementation. try get token from cache, additionaly clean up cache if required. 
- try { - const tokenResponse = yield _super.getTokenFromCache.call(this, undefined); - return Promise.resolve(tokenResponse); - } - catch (error) { - // Remove the stale token from the tokencache. ADAL gives the same error message "Entry not found in cache." - // for entry not being present in the cache and for accessToken being expired in the cache. We do not want the token cache - // to contain the expired token, we clean it up here. - const status = yield self.removeInvalidItemsFromCache({ - _clientId: self.clientId - }); - if (status.result) { - return Promise.reject(error); - } - const message = status && status.details && status.details.message - ? status.details.message - : status.details; - return Promise.reject(new Error(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR + - " : " + - "critical failure while removing expired token for service principal from token cache. " + - message)); - } - }); + +Cookie.prototype.clone = function() { + return fromJSON(this.toJSON()); +}; + +Cookie.prototype.validate = function validate() { + if (!COOKIE_OCTETS.test(this.value)) { + return false; + } + if (this.expires != Infinity && !(this.expires instanceof Date) && !parseDate(this.expires)) { + return false; + } + if (this.maxAge != null && this.maxAge <= 0) { + return false; // "Max-Age=" non-zero-digit *DIGIT + } + if (this.path != null && !PATH_VALUE.test(this.path)) { + return false; + } + + var cdomain = this.cdomain(); + if (cdomain) { + if (cdomain.match(/\.$/)) { + return false; // S4.1.2.3 suggests that this is bad. domainMatch() tests confirm this } - /** - * Removes invalid items from token cache. This method is different. Here we never reject in case of error. - * Rather we resolve with an object that says the result is false and error information is provided in - * the details property of the resolved object. This is done to do better error handling in the above function - * where removeInvalidItemsFromCache() is called. - * @param {object} query The query to be used for finding the token for service principal from the cache - * @returns {result: boolean, details?: Error} resultObject with more info. 
- */ - removeInvalidItemsFromCache(query) { - const self = this; - return new Promise(resolve => { - self.tokenCache.find(query, (error, entries) => { - if (error) { - return resolve({ result: false, details: error }); - } - if (entries && entries.length > 0) { - return new Promise(resolve => { - return self.tokenCache.remove(entries, (err) => { - if (err) { - return resolve({ result: false, details: err }); - } - return resolve({ result: true }); - }); - }); - } - else { - return resolve({ result: true }); - } - }); - }); + var suffix = pubsuffix.getPublicSuffix(cdomain); + if (suffix == null) { // it's a public suffix + return false; } -} -exports.ApplicationTokenCredentialsBase = ApplicationTokenCredentialsBase; -//# sourceMappingURL=applicationTokenCredentialsBase.js.map + } + return true; +}; -/***/ }), +Cookie.prototype.setExpires = function setExpires(exp) { + if (exp instanceof Date) { + this.expires = exp; + } else { + this.expires = parseDate(exp) || "Infinity"; + } +}; -/***/ 38909: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +Cookie.prototype.setMaxAge = function setMaxAge(age) { + if (age === Infinity || age === -Infinity) { + this.maxAge = age.toString(); // so JSON.stringify() works + } else { + this.maxAge = age; + } +}; -"use strict"; +// gives Cookie header format +Cookie.prototype.cookieString = function cookieString() { + var val = this.value; + if (val == null) { + val = ''; + } + if (this.key === '') { + return val; + } + return this.key+'='+val; +}; -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); +// gives Set-Cookie header format +Cookie.prototype.toString = function toString() { + var str = this.cookieString(); + + if (this.expires != Infinity) { + if (this.expires instanceof Date) { + str += '; Expires='+formatDate(this.expires); + } else { + str += '; Expires='+this.expires; + } + } + + if (this.maxAge != null && this.maxAge != Infinity) { + str += '; Max-Age='+this.maxAge; + } + + if (this.domain && !this.hostOnly) { + str += '; Domain='+this.domain; + } + if (this.path) { + str += '; Path='+this.path; + } + + if (this.secure) { + str += '; Secure'; + } + if (this.httpOnly) { + str += '; HttpOnly'; + } + if (this.extensions) { + this.extensions.forEach(function(ext) { + str += '; '+ext; }); + } + + return str; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const ms_rest_js_1 = __nccwpck_require__(30812); -const login_1 = __nccwpck_require__(29325); -/** - * Describes the credentials by retrieving token via Azure CLI. - */ -class AzureCliCredentials { - constructor(subscriptionInfo, tokenInfo, - // tslint:disable-next-line: no-inferrable-types - resource = "https://management.azure.com") { - /** - * Azure resource endpoints. - * - Defaults to Azure Resource Manager from environment: AzureCloud. 
"https://management.azure.com" - * - For Azure KeyVault: "https://vault.azure.net" - * - For Azure Batch: "https://batch.core.windows.net" - * - For Azure Active Directory Graph: "https://graph.windows.net" - * - * To get the resource for other clouds: - * - `az cloud list` - */ - // tslint:disable-next-line: no-inferrable-types - this.resource = "https://management.azure.com"; - /** - * The number of seconds within which it is good to renew the token. - * A constant set to 270 seconds (4.5 minutes). - */ - this._tokenRenewalMarginInSeconds = 270; - this.subscriptionInfo = subscriptionInfo; - this.tokenInfo = tokenInfo; - this.resource = resource; + +// TTL() partially replaces the "expiry-time" parts of S5.3 step 3 (setCookie() +// elsewhere) +// S5.3 says to give the "latest representable date" for which we use Infinity +// For "expired" we use 0 +Cookie.prototype.TTL = function TTL(now) { + /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires + * attribute, the Max-Age attribute has precedence and controls the + * expiration date of the cookie. + * (Concurs with S5.3 step 3) + */ + if (this.maxAge != null) { + return this.maxAge<=0 ? 0 : this.maxAge*1000; + } + + var expires = this.expires; + if (expires != Infinity) { + if (!(expires instanceof Date)) { + expires = parseDate(expires) || Infinity; } - /** - * Tries to get the new token from Azure CLI, if the token has expired or the subscription has - * changed else uses the cached accessToken. - * @return The tokenResponse (tokenType and accessToken are the two important properties). - */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - if (this._hasTokenExpired() || this._hasSubscriptionChanged() || this._hasResourceChanged()) { - try { - // refresh the access token - this.tokenInfo = yield AzureCliCredentials.getAccessToken({ - subscriptionIdOrName: this.subscriptionInfo.id, - resource: this.resource - }); - } - catch (err) { - throw new Error(`An error occurred while refreshing the new access ` + - `token:${err.stderr ? err.stderr : err.message}`); - } - } - const result = { - accessToken: this.tokenInfo.accessToken, - tokenType: this.tokenInfo.tokenType, - expiresOn: this.tokenInfo.expiresOn, - tenantId: this.tokenInfo.tenant - }; - return result; - }); + + if (expires == Infinity) { + return Infinity; } - /** - * Signs a request with the Authentication header. - * @param The request to be signed. - */ - signRequest(webResource) { - return __awaiter(this, void 0, void 0, function* () { - const tokenResponse = yield this.getToken(); - webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); - return Promise.resolve(webResource); - }); + + return expires.getTime() - (now || Date.now()); + } + + return Infinity; +}; + +// expiryTime() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() +// elsewhere) +Cookie.prototype.expiryTime = function expiryTime(now) { + if (this.maxAge != null) { + var relativeTo = now || this.creation || new Date(); + var age = (this.maxAge <= 0) ? 
-Infinity : this.maxAge*1000; + return relativeTo.getTime() + age; + } + + if (this.expires == Infinity) { + return Infinity; + } + return this.expires.getTime(); +}; + +// expiryDate() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() +// elsewhere), except it returns a Date +Cookie.prototype.expiryDate = function expiryDate(now) { + var millisec = this.expiryTime(now); + if (millisec == Infinity) { + return new Date(MAX_TIME); + } else if (millisec == -Infinity) { + return new Date(MIN_TIME); + } else { + return new Date(millisec); + } +}; + +// This replaces the "persistent-flag" parts of S5.3 step 3 +Cookie.prototype.isPersistent = function isPersistent() { + return (this.maxAge != null || this.expires != Infinity); +}; + +// Mostly S5.1.2 and S5.2.3: +Cookie.prototype.cdomain = +Cookie.prototype.canonicalizedDomain = function canonicalizedDomain() { + if (this.domain == null) { + return null; + } + return canonicalDomain(this.domain); +}; + +function CookieJar(store, options) { + if (typeof options === "boolean") { + options = {rejectPublicSuffixes: options}; + } else if (options == null) { + options = {}; + } + if (options.rejectPublicSuffixes != null) { + this.rejectPublicSuffixes = options.rejectPublicSuffixes; + } + if (options.looseMode != null) { + this.enableLooseMode = options.looseMode; + } + + if (!store) { + store = new MemoryCookieStore(); + } + this.store = store; +} +CookieJar.prototype.store = null; +CookieJar.prototype.rejectPublicSuffixes = true; +CookieJar.prototype.enableLooseMode = false; +var CAN_BE_SYNC = []; + +CAN_BE_SYNC.push('setCookie'); +CookieJar.prototype.setCookie = function(cookie, url, options, cb) { + var err; + var context = getCookieContext(url); + if (options instanceof Function) { + cb = options; + options = {}; + } + + var host = canonicalDomain(context.hostname); + var loose = this.enableLooseMode; + if (options.loose != null) { + loose = options.loose; + } + + // S5.3 step 1 + if (!(cookie instanceof Cookie)) { + cookie = Cookie.parse(cookie, { loose: loose }); + } + if (!cookie) { + err = new Error("Cookie failed to parse"); + return cb(options.ignoreError ? null : err); + } + + // S5.3 step 2 + var now = options.now || new Date(); // will assign later to save effort in the face of errors + + // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie() + + // S5.3 step 4: NOOP; domain is null by default + + // S5.3 step 5: public suffixes + if (this.rejectPublicSuffixes && cookie.domain) { + var suffix = pubsuffix.getPublicSuffix(cookie.cdomain()); + if (suffix == null) { // e.g. "com" + err = new Error("Cookie has domain set to a public suffix"); + return cb(options.ignoreError ? null : err); } - _hasTokenExpired() { - let result = true; - const now = Math.floor(Date.now() / 1000); - if (this.tokenInfo.expiresOn && - this.tokenInfo.expiresOn instanceof Date && - Math.floor(this.tokenInfo.expiresOn.getTime() / 1000) - now > this._tokenRenewalMarginInSeconds) { - result = false; - } - return result; + } + + // S5.3 step 6: + if (cookie.domain) { + if (!domainMatch(host, cookie.cdomain(), false)) { + err = new Error("Cookie not in this host's domain. Cookie:"+cookie.cdomain()+" Request:"+host); + return cb(options.ignoreError ? 
null : err); } - _hasSubscriptionChanged() { - return this.subscriptionInfo.id !== this.tokenInfo.subscription; + + if (cookie.hostOnly == null) { // don't reset if already set + cookie.hostOnly = false; } - _parseToken() { - try { - const base64Url = this.tokenInfo.accessToken.split(".")[1]; - const base64 = decodeURIComponent(Buffer.from(base64Url, "base64").toString("binary").split("").map((c) => { - return "%" + ("00" + c.charCodeAt(0).toString(16)).slice(-2); - }).join("")); - return JSON.parse(base64); - } - catch (err) { - const msg = `An error occurred while parsing the access token: ${err.stack}`; - throw new Error(msg); - } + + } else { + cookie.hostOnly = true; + cookie.domain = host; + } + + //S5.2.4 If the attribute-value is empty or if the first character of the + //attribute-value is not %x2F ("/"): + //Let cookie-path be the default-path. + if (!cookie.path || cookie.path[0] !== '/') { + cookie.path = defaultPath(context.pathname); + cookie.pathIsDefault = true; + } + + // S5.3 step 8: NOOP; secure attribute + // S5.3 step 9: NOOP; httpOnly attribute + + // S5.3 step 10 + if (options.http === false && cookie.httpOnly) { + err = new Error("Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + + var store = this.store; + + if (!store.updateCookie) { + store.updateCookie = function(oldCookie, newCookie, cb) { + this.putCookie(newCookie, cb); + }; + } + + function withCookie(err, oldCookie) { + if (err) { + return cb(err); } - _isAzureResourceManagerEndpoint(newResource, currentResource) { - if (newResource.endsWith("/")) - newResource = newResource.slice(0, -1); - if (currentResource.endsWith("/")) - currentResource = currentResource.slice(0, -1); - return (newResource === "https://management.core.windows.net" && - currentResource === "https://management.azure.com") || - (newResource === "https://management.azure.com" && - currentResource === "https://management.core.windows.net"); + + var next = function(err) { + if (err) { + return cb(err); + } else { + cb(null, cookie); + } + }; + + if (oldCookie) { + // S5.3 step 11 - "If the cookie store contains a cookie with the same name, + // domain, and path as the newly created cookie:" + if (options.http === false && oldCookie.httpOnly) { // step 11.2 + err = new Error("old Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + cookie.creation = oldCookie.creation; // step 11.3 + cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker + cookie.lastAccessed = now; + // Step 11.4 (delete cookie) is implied by just setting the new one: + store.updateCookie(oldCookie, cookie, next); // step 12 + + } else { + cookie.creation = cookie.lastAccessed = now; + store.putCookie(cookie, next); // step 12 } - _hasResourceChanged() { - const parsedToken = this._parseToken(); - // normalize the resource string, since it is possible to - // provide a resource without a trailing slash - const currentResource = parsedToken.aud && parsedToken.aud.endsWith("/") - ? parsedToken.aud.slice(0, -1) - : parsedToken.aud; - const newResource = this.resource.endsWith("/") - ? this.resource.slice(0, -1) - : this.resource; - const result = this._isAzureResourceManagerEndpoint(newResource, currentResource) - ? 
false - : currentResource !== newResource; - return result; + } + + store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie); +}; + +// RFC6365 S5.4 +CAN_BE_SYNC.push('getCookies'); +CookieJar.prototype.getCookies = function(url, options, cb) { + var context = getCookieContext(url); + if (options instanceof Function) { + cb = options; + options = {}; + } + + var host = canonicalDomain(context.hostname); + var path = context.pathname || '/'; + + var secure = options.secure; + if (secure == null && context.protocol && + (context.protocol == 'https:' || context.protocol == 'wss:')) + { + secure = true; + } + + var http = options.http; + if (http == null) { + http = true; + } + + var now = options.now || Date.now(); + var expireCheck = options.expire !== false; + var allPaths = !!options.allPaths; + var store = this.store; + + function matchingCookie(c) { + // "Either: + // The cookie's host-only-flag is true and the canonicalized + // request-host is identical to the cookie's domain. + // Or: + // The cookie's host-only-flag is false and the canonicalized + // request-host domain-matches the cookie's domain." + if (c.hostOnly) { + if (c.domain != host) { + return false; + } + } else { + if (!domainMatch(host, c.domain, false)) { + return false; + } } - /** - * Gets the access token for the default or specified subscription. - * @param options Optional parameters that can be provided to get the access token. - */ - static getAccessToken(options = {}) { - return __awaiter(this, void 0, void 0, function* () { - try { - let cmd = "account get-access-token"; - if (options.subscriptionIdOrName) { - cmd += ` -s "${options.subscriptionIdOrName}"`; - } - if (options.resource) { - cmd += ` --resource ${options.resource}`; - } - const result = yield login_1.execAz(cmd); - result.expiresOn = new Date(result.expiresOn); - return result; - } - catch (err) { - const message = `An error occurred while getting credentials from ` + - `Azure CLI: ${err.stack}`; - throw new Error(message); - } - }); + + // "The request-uri's path path-matches the cookie's path." + if (!allPaths && !pathMatch(path, c.path)) { + return false; } - /** - * Gets the subscription from Azure CLI. - * @param subscriptionIdOrName - The name or id of the subscription for which the information is - * required. - */ - static getSubscription(subscriptionIdOrName) { - return __awaiter(this, void 0, void 0, function* () { - if (subscriptionIdOrName && (typeof subscriptionIdOrName !== "string" || !subscriptionIdOrName.length)) { - throw new Error("'subscriptionIdOrName' must be a non-empty string."); - } - try { - let cmd = "account show"; - if (subscriptionIdOrName) { - cmd += ` -s "${subscriptionIdOrName}"`; - } - const result = yield login_1.execAz(cmd); - return result; - } - catch (err) { - const message = `An error occurred while getting information about the current subscription from ` + - `Azure CLI: ${err.stack}`; - throw new Error(message); - } - }); + + // "If the cookie's secure-only-flag is true, then the request-uri's + // scheme must denote a "secure" protocol" + if (c.secure && !secure) { + return false; } - /** - * Sets the specified subscription as the default subscription for Azure CLI. - * @param subscriptionIdOrName The name or id of the subsciption that needs to be set as the - * default subscription. 
- */ - static setDefaultSubscription(subscriptionIdOrName) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield login_1.execAz(`account set -s ${subscriptionIdOrName}`); - } - catch (err) { - const message = `An error occurred while setting the current subscription from ` + - `Azure CLI: ${err.stack}`; - throw new Error(message); - } - }); + + // "If the cookie's http-only-flag is true, then exclude the cookie if the + // cookie-string is being generated for a "non-HTTP" API" + if (c.httpOnly && !http) { + return false; } - /** - * Returns a list of all the subscriptions from Azure CLI. - * @param options Optional parameters that can be provided while listing all the subcriptions. - */ - static listAllSubscriptions(options = {}) { - return __awaiter(this, void 0, void 0, function* () { - let subscriptionList = []; - try { - let cmd = "account list"; - if (options.all) { - cmd += " --all"; - } - if (options.refresh) { - cmd += "--refresh"; - } - subscriptionList = yield login_1.execAz(cmd); - if (subscriptionList && subscriptionList.length) { - for (const sub of subscriptionList) { - if (sub.cloudName) { - sub.environmentName = sub.cloudName; - delete sub.cloudName; - } - } - } - return subscriptionList; - } - catch (err) { - const message = `An error occurred while getting a list of all the subscription from ` + - `Azure CLI: ${err.stack}`; - throw new Error(message); - } - }); + + // deferred from S5.3 + // non-RFC: allow retention of expired cookies by choice + if (expireCheck && c.expiryTime() <= now) { + store.removeCookie(c.domain, c.path, c.key, function(){}); // result ignored + return false; } - /** - * Provides credentials that can be used by the JS SDK to interact with Azure via azure cli. - * **Pre-requisite** - * - **install azure-cli** . For more information see - * {@link https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest Install Azure CLI} - * - **login via `az login`** - * @param options - Optional parameters that can be provided while creating AzureCliCredentials. - */ - static create(options = {}) { - return __awaiter(this, void 0, void 0, function* () { - const [subscriptinInfo, accessToken] = yield Promise.all([ - AzureCliCredentials.getSubscription(options.subscriptionIdOrName), - AzureCliCredentials.getAccessToken(options) - ]); - return new AzureCliCredentials(subscriptinInfo, accessToken, options.resource); - }); + + return true; + } + + store.findCookies(host, allPaths ? null : path, function(err,cookies) { + if (err) { + return cb(err); } -} -exports.AzureCliCredentials = AzureCliCredentials; -//# sourceMappingURL=azureCliCredentials.js.map -/***/ }), + cookies = cookies.filter(matchingCookie); -/***/ 80450: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // sorting of S5.4 part 2 + if (options.sort !== false) { + cookies = cookies.sort(cookieCompare); + } -"use strict"; + // S5.4 part 3 + var now = new Date(); + cookies.forEach(function(c) { + c.lastAccessed = now; + }); + // TODO persist lastAccessed -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -const tokenCredentialsBase_1 = __nccwpck_require__(5558); -const authConstants_1 = __nccwpck_require__(26700); -class DeviceTokenCredentials extends tokenCredentialsBase_1.TokenCredentialsBase { - /** - * Creates a new DeviceTokenCredentials object that gets a new access token using userCodeInfo (contains user_code, device_code) - * for authenticating user on device. - * - * When this credential is used, the script will provide a url and code. The user needs to copy the url and the code, paste it - * in a browser and authenticate over there. If successful, the script will get the access token. - * - * @constructor - * @param {string} [clientId] The active directory application client id. - * @param {string} [domain] The domain or tenant id containing this application. Default value is "common" - * @param {string} [username] The user name for account in the form: "user@example.com". - * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {Environment} [environment] The azure environment to authenticate with. Default environment is "Azure" popularly known as "Public Azure Cloud". - * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. - */ - constructor(clientId, domain, username, tokenAudience, environment, tokenCache) { - if (!username) { - username = "user@example.com"; - } - if (!domain) { - domain = authConstants_1.AuthConstants.AAD_COMMON_TENANT; - } - if (!clientId) { - clientId = authConstants_1.AuthConstants.DEFAULT_ADAL_CLIENT_ID; - } - super(clientId, domain, tokenAudience, environment, tokenCache); - this.username = username; + cb(null,cookies); + }); +}; + +CAN_BE_SYNC.push('getCookieString'); +CookieJar.prototype.getCookieString = function(/*..., cb*/) { + var args = Array.prototype.slice.call(arguments,0); + var cb = args.pop(); + var next = function(err,cookies) { + if (err) { + cb(err); + } else { + cb(null, cookies + .sort(cookieCompare) + .map(function(c){ + return c.cookieString(); + }) + .join('; ')); } - getToken() { - // For device auth, this is just getTokenFromCache. 
- return this.getTokenFromCache(this.username); + }; + args.push(next); + this.getCookies.apply(this,args); +}; + +CAN_BE_SYNC.push('getSetCookieStrings'); +CookieJar.prototype.getSetCookieStrings = function(/*..., cb*/) { + var args = Array.prototype.slice.call(arguments,0); + var cb = args.pop(); + var next = function(err,cookies) { + if (err) { + cb(err); + } else { + cb(null, cookies.map(function(c){ + return c.toString(); + })); } -} -exports.DeviceTokenCredentials = DeviceTokenCredentials; -//# sourceMappingURL=deviceTokenCredentials.js.map + }; + args.push(next); + this.getCookies.apply(this,args); +}; -/***/ }), +CAN_BE_SYNC.push('serialize'); +CookieJar.prototype.serialize = function(cb) { + var type = this.store.constructor.name; + if (type === 'Object') { + type = null; + } -/***/ 23699: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // update README.md "Serialization Format" if you change this, please! + var serialized = { + // The version of tough-cookie that serialized this jar. Generally a good + // practice since future versions can make data import decisions based on + // known past behavior. When/if this matters, use `semver`. + version: 'tough-cookie@'+VERSION, -"use strict"; + // add the store type, to make humans happy: + storeType: type, -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -Object.defineProperty(exports, "__esModule", ({ value: true })); -const applicationTokenCredentials_1 = __nccwpck_require__(35216); -const applicationTokenCertificateCredentials_1 = __nccwpck_require__(2265); -const deviceTokenCredentials_1 = __nccwpck_require__(80450); -const msiAppServiceTokenCredentials_1 = __nccwpck_require__(68376); -const msiTokenCredentials_1 = __nccwpck_require__(6182); -const msiVmTokenCredentials_1 = __nccwpck_require__(73287); -const tokenCredentialsBase_1 = __nccwpck_require__(5558); -const userTokenCredentials_1 = __nccwpck_require__(14502); -const adal_node_1 = __nccwpck_require__(35894); -function createAuthenticator(credentials) { - const convertedCredentials = _convert(credentials); - const authenticator = _createAuthenticatorMapper(convertedCredentials); - return authenticator; -} -exports.createAuthenticator = createAuthenticator; -function _convert(credentials) { - if (credentials instanceof msiAppServiceTokenCredentials_1.MSIAppServiceTokenCredentials) { - return new msiAppServiceTokenCredentials_1.MSIAppServiceTokenCredentials({ - msiEndpoint: credentials.msiEndpoint, - msiSecret: credentials.msiSecret, - msiApiVersion: credentials.msiApiVersion, - resource: credentials.resource - }); - } - else if (credentials instanceof msiVmTokenCredentials_1.MSIVmTokenCredentials) { - return new msiVmTokenCredentials_1.MSIVmTokenCredentials({ - resource: credentials.resource, - msiEndpoint: credentials.msiEndpoint - }); - } - else if (credentials instanceof msiTokenCredentials_1.MSITokenCredentials) { - throw new Error("MSI-credentials not one of: MSIVmTokenCredentials, MSIAppServiceTokenCredentials"); - } - else { - return credentials; - } -} -function _createAuthenticatorMapper(credentials) { - return (challenge) => new Promise((resolve, reject) => { - // Function to take token Response and format a authorization value - const _formAuthorizationValue = (err, tokenResponse) => { - if (err) { - return reject(err); - } - if (tokenResponse.error) { - return reject(tokenResponse.error); - } - tokenResponse = tokenResponse; - // 
Calculate the value to be set in the request's Authorization header and resume the call. - const authorizationValue = tokenResponse.tokenType + " " + tokenResponse.accessToken; - return resolve(authorizationValue); - }; - // Create a new authentication context. - if (credentials instanceof tokenCredentialsBase_1.TokenCredentialsBase) { - const context = new adal_node_1.AuthenticationContext(challenge.authorization, true, credentials.authContext && credentials.authContext.cache); - if (credentials instanceof applicationTokenCredentials_1.ApplicationTokenCredentials) { - return context.acquireTokenWithClientCredentials(challenge.resource, credentials.clientId, credentials.secret, _formAuthorizationValue); - } - else if (credentials instanceof applicationTokenCertificateCredentials_1.ApplicationTokenCertificateCredentials) { - return context.acquireTokenWithClientCertificate(challenge.resource, credentials.clientId, credentials.certificate, credentials.thumbprint, _formAuthorizationValue); - } - else if (credentials instanceof userTokenCredentials_1.UserTokenCredentials) { - return context.acquireTokenWithUsernamePassword(challenge.resource, credentials.username, credentials.password, credentials.clientId, _formAuthorizationValue); - } - else if (credentials instanceof deviceTokenCredentials_1.DeviceTokenCredentials) { - return context.acquireToken(challenge.resource, credentials.username, credentials.clientId, _formAuthorizationValue); - } - } - else if (credentials instanceof msiTokenCredentials_1.MSITokenCredentials) { - return credentials.getToken(); - } - else { - return reject(new Error("credentials must be one of: ApplicationTokenCredentials, UserTokenCredentials, " + - "DeviceTokenCredentials, MSITokenCredentials")); - } - }); -} -//# sourceMappingURL=keyVaultFactory.js.map + // CookieJar configuration: + rejectPublicSuffixes: !!this.rejectPublicSuffixes, -/***/ }), + // this gets filled from getAllCookies: + cookies: [] + }; -/***/ 68376: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + if (!(this.store.getAllCookies && + typeof this.store.getAllCookies === 'function')) + { + return cb(new Error('store does not support getAllCookies and cannot be serialized')); + } -"use strict"; + this.store.getAllCookies(function(err,cookies) { + if (err) { + return cb(err); + } -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); + serialized.cookies = cookies.map(function(cookie) { + // convert to serialized 'raw' cookies + cookie = (cookie instanceof Cookie) ? 
cookie.toJSON() : cookie; + + // Remove the index so new ones get assigned during deserialization + delete cookie.creationIndex; + + return cookie; }); + + return cb(null, serialized); + }); }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const msiTokenCredentials_1 = __nccwpck_require__(6182); -const ms_rest_js_1 = __nccwpck_require__(30812); -/** - * @class MSIAppServiceTokenCredentials - */ -class MSIAppServiceTokenCredentials extends msiTokenCredentials_1.MSITokenCredentials { - /** - * Creates an instance of MSIAppServiceTokenCredentials. - * @param {string} [options.msiEndpoint] - The local URL from which your app can request tokens. - * Either provide this parameter or set the environment variable `MSI_ENDPOINT`. - * For example: `MSI_ENDPOINT="http://127.0.0.1:41741/MSI/token/"` - * @param {string} [options.msiSecret] - The secret used in communication between your code and the local MSI agent. - * Either provide this parameter or set the environment variable `MSI_SECRET`. - * For example: `MSI_SECRET="69418689F1E342DD946CB82994CDA3CB"` - * @param {string} [options.resource] - The resource uri or token audience for which the token is needed. - * For e.g. it can be: - * - resource management endpoint "https://management.azure.com/" (default) - * - management endpoint "https://management.core.windows.net/" - * @param {string} [options.msiApiVersion] - The api-version of the local MSI agent. Default value is "2017-09-01". - */ - constructor(options) { - if (!options) - options = {}; - super(options); - options.msiEndpoint = options.msiEndpoint || process.env["MSI_ENDPOINT"]; - options.msiSecret = options.msiSecret || process.env["MSI_SECRET"]; - if (!options.msiEndpoint || (options.msiEndpoint && typeof options.msiEndpoint.valueOf() !== "string")) { - throw new Error('Either provide "msiEndpoint" as a property of the "options" object ' + - 'or set the environment variable "MSI_ENDPOINT" and it must be of type "string".'); - } - if (!options.msiSecret || (options.msiSecret && typeof options.msiSecret.valueOf() !== "string")) { - throw new Error('Either provide "msiSecret" as a property of the "options" object ' + - 'or set the environment variable "MSI_SECRET" and it must be of type "string".'); - } - if (!options.msiApiVersion) { - options.msiApiVersion = "2017-09-01"; - } - else if (typeof options.msiApiVersion.valueOf() !== "string") { - throw new Error("msiApiVersion must be a uri."); - } - this.msiEndpoint = options.msiEndpoint; - this.msiSecret = options.msiSecret; - this.msiApiVersion = options.msiApiVersion; - } - /** - * Prepares and sends a GET request to a service endpoint indicated by the app service, which responds with the access token. - * @return {Promise} Promise with the tokenResponse (tokenType and accessToken are the two important properties). - */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - const reqOptions = this.prepareRequestOptions(); - let opRes; - let result; - opRes = yield this._httpClient.sendRequest(reqOptions); - if (opRes.bodyAsText === undefined || opRes.bodyAsText.indexOf("ExceptionMessage") !== -1) { - throw new Error(`MSI: Failed to retrieve a token from "${reqOptions.url}" with an error: ${opRes.bodyAsText}`); - } - result = this.parseTokenResponse(opRes.bodyAsText); - if (!result.tokenType) { - throw new Error(`Invalid token response, did not find tokenType. 
Response body is: ${opRes.bodyAsText}`); - } - else if (!result.accessToken) { - throw new Error(`Invalid token response, did not find accessToken. Response body is: ${opRes.bodyAsText}`); - } - return result; - }); + +// well-known name that JSON.stringify calls +CookieJar.prototype.toJSON = function() { + return this.serializeSync(); +}; + +// use the class method CookieJar.deserialize instead of calling this directly +CAN_BE_SYNC.push('_importCookies'); +CookieJar.prototype._importCookies = function(serialized, cb) { + var jar = this; + var cookies = serialized.cookies; + if (!cookies || !Array.isArray(cookies)) { + return cb(new Error('serialized jar has no cookies array')); + } + cookies = cookies.slice(); // do not modify the original + + function putNext(err) { + if (err) { + return cb(err); } - prepareRequestOptions() { - const endpoint = this.msiEndpoint.endsWith("/") ? this.msiEndpoint : `${this.msiEndpoint}/`; - const resource = encodeURIComponent(this.resource); - const getUrl = `${endpoint}?resource=${resource}&api-version=${this.msiApiVersion}`; - const reqOptions = { - url: getUrl, - headers: { - "secret": this.msiSecret - }, - method: "GET" - }; - const webResource = new ms_rest_js_1.WebResource(); - return webResource.prepare(reqOptions); + + if (!cookies.length) { + return cb(err, jar); } -} -exports.MSIAppServiceTokenCredentials = MSIAppServiceTokenCredentials; -//# sourceMappingURL=msiAppServiceTokenCredentials.js.map -/***/ }), + var cookie; + try { + cookie = fromJSON(cookies.shift()); + } catch (e) { + return cb(e); + } -/***/ 6182: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + if (cookie === null) { + return putNext(null); // skip this cookie + } -"use strict"; + jar.store.putCookie(cookie, putNext); + } -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + putNext(); }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const ms_rest_js_1 = __nccwpck_require__(30812); -const authConstants_1 = __nccwpck_require__(26700); -/** - * @class MSITokenCredentials - Provides information about managed service identity token credentials. - * This object can only be used to acquire token on a virtual machine provisioned in Azure with managed service identity. - */ -class MSITokenCredentials { - /** - * Creates an instance of MSITokenCredentials. - * @param {object} [options] - Optional parameters - * @param {string} [options.resource] - The resource uri or token audience for which the token is needed. - * For e.g. 
it can be: - * - resource management endpoint "https://management.azure.com/"(default) - * - management endpoint "https://management.core.windows.net/" - */ - constructor(options) { - if (!options) - options = {}; - if (!options.resource) { - options.resource = authConstants_1.AuthConstants.RESOURCE_MANAGER_ENDPOINT; - } - else if (typeof options.resource.valueOf() !== "string") { - throw new Error("resource must be a uri."); - } - this.resource = options.resource; - this._httpClient = options.httpClient || new ms_rest_js_1.DefaultHttpClient(); - } - /** - * Parses a tokenResponse json string into a object, and converts properties on the first level to camelCase. - * This method tries to standardize the tokenResponse - * @param {string} body A json string - * @return {object} [tokenResponse] The tokenResponse (tokenType and accessToken are the two important properties). - */ - parseTokenResponse(body) { - // Docs show different examples of possible MSI responses for different services. https://docs.microsoft.com/en-us/azure/active-directory/managed-service-identity/overview - // expires_on - is a Date like string in this doc - // - https://docs.microsoft.com/en-us/azure/app-service/app-service-managed-service-identity#rest-protocol-examples - // In other doc it is stringified number. - // - https://docs.microsoft.com/en-us/azure/active-directory/managed-service-identity/tutorial-linux-vm-access-arm#get-an-access-token-using-the-vms-identity-and-use-it-to-call-resource-manager - const parsedBody = JSON.parse(body); - parsedBody.accessToken = parsedBody["access_token"]; - delete parsedBody["access_token"]; - parsedBody.tokenType = parsedBody["token_type"]; - delete parsedBody["token_type"]; - if (parsedBody["refresh_token"]) { - parsedBody.refreshToken = parsedBody["refresh_token"]; - delete parsedBody["refresh_token"]; - } - if (parsedBody["expires_in"]) { - parsedBody.expiresIn = parsedBody["expires_in"]; - if (typeof parsedBody["expires_in"] === "string") { - // normal number as a string '1504130527' - parsedBody.expiresIn = parseInt(parsedBody["expires_in"], 10); - } - delete parsedBody["expires_in"]; - } - if (parsedBody["not_before"]) { - parsedBody.notBefore = parsedBody["not_before"]; - if (typeof parsedBody["not_before"] === "string") { - // normal number as a string '1504130527' - parsedBody.notBefore = parseInt(parsedBody["not_before"], 10); - } - delete parsedBody["not_before"]; - } - if (parsedBody["expires_on"]) { - parsedBody.expiresOn = parsedBody["expires_on"]; - if (typeof parsedBody["expires_on"] === "string") { - // possibly a Date string '09/14/2017 00:00:00 PM +00:00' - if (parsedBody["expires_on"].includes(":") || parsedBody["expires_on"].includes("/")) { - parsedBody.expiresOn = new Date(parsedBody["expires_on"], 10); - } - else { - // normal number as a string '1504130527' - parsedBody.expiresOn = new Date(parseInt(parsedBody["expires_on"], 10)); - } - } - delete parsedBody["expires_on"]; - } - return parsedBody; - } - /** - * Signs a request with the Authentication header. - * - * @param {webResource} The WebResource to be signed. - * @return {Promise} Promise with signed WebResource. 
- */ - signRequest(webResource) { - return __awaiter(this, void 0, void 0, function* () { - const tokenResponse = yield this.getToken(); - webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); - return Promise.resolve(webResource); - }); + +CookieJar.deserialize = function(strOrObj, store, cb) { + if (arguments.length !== 3) { + // store is optional + cb = store; + store = null; + } + + var serialized; + if (typeof strOrObj === 'string') { + serialized = jsonParse(strOrObj); + if (serialized instanceof Error) { + return cb(serialized); } -} -exports.MSITokenCredentials = MSITokenCredentials; -//# sourceMappingURL=msiTokenCredentials.js.map + } else { + serialized = strOrObj; + } -/***/ }), + var jar = new CookieJar(store, serialized.rejectPublicSuffixes); + jar._importCookies(serialized, function(err) { + if (err) { + return cb(err); + } + cb(null, jar); + }); +}; -/***/ 73287: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +CookieJar.deserializeSync = function(strOrObj, store) { + var serialized = typeof strOrObj === 'string' ? + JSON.parse(strOrObj) : strOrObj; + var jar = new CookieJar(store, serialized.rejectPublicSuffixes); -"use strict"; + // catch this mistake early: + if (!jar.store.synchronous) { + throw new Error('CookieJar store is not synchronous; use async API instead.'); + } -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + jar._importCookiesSync(serialized); + return jar; }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const msiTokenCredentials_1 = __nccwpck_require__(6182); -const ms_rest_js_1 = __nccwpck_require__(30812); -/** - * @class MSIVmTokenCredentials - */ -class MSIVmTokenCredentials extends msiTokenCredentials_1.MSITokenCredentials { - constructor(options) { - if (!options) - options = {}; - super(options); - if (!options.msiEndpoint) { - options.msiEndpoint = "http://169.254.169.254/metadata/identity/oauth2/token"; - } - else if (typeof options.msiEndpoint !== "string") { - throw new Error("msiEndpoint must be a string."); - } - const urlBuilder = ms_rest_js_1.URLBuilder.parse(options.msiEndpoint); - if (!urlBuilder.getScheme()) { - options.msiEndpoint = `http://${options.msiEndpoint}`; - } - if (!options.apiVersion) { - options.apiVersion = "2018-02-01"; - } - else if (typeof options.apiVersion !== "string") { - throw new Error("apiVersion must be a string."); - } - if (!options.httpMethod) { - options.httpMethod = "GET"; - } - this.apiVersion = options.apiVersion; - this.msiEndpoint = options.msiEndpoint; - this.httpMethod = options.httpMethod; - this.objectId = options.objectId; - this.clientId = options.clientId; - this.identityId = options.identityId; +CookieJar.fromJSON = CookieJar.deserializeSync; + +CookieJar.prototype.clone = function(newStore, cb) { + if (arguments.length === 1) { + cb = newStore; + newStore = null; + } + + this.serialize(function(err,serialized) { + if (err) { + return cb(err); } - /** - * Prepares and sends a POST request to a service endpoint hosted on the Azure VM, which responds with the access token. - * @return {Promise} Promise with the tokenResponse (tokenType and accessToken are the two important properties). - */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - const reqOptions = this.prepareRequestOptions(); - let opRes; - let result; - opRes = yield this._httpClient.sendRequest(reqOptions); - result = this.parseTokenResponse(opRes.bodyAsText); - if (!result.tokenType) { - throw new Error(`Invalid token response, did not find tokenType. Response body is: ${opRes.bodyAsText}`); - } - else if (!result.accessToken) { - throw new Error(`Invalid token response, did not find accessToken. Response body is: ${opRes.bodyAsText}`); - } - return result; - }); + CookieJar.deserialize(serialized, newStore, cb); + }); +}; + +CAN_BE_SYNC.push('removeAllCookies'); +CookieJar.prototype.removeAllCookies = function(cb) { + var store = this.store; + + // Check that the store implements its own removeAllCookies(). The default + // implementation in Store will immediately call the callback with a "not + // implemented" Error. 
+ if (store.removeAllCookies instanceof Function && + store.removeAllCookies !== Store.prototype.removeAllCookies) + { + return store.removeAllCookies(cb); + } + + store.getAllCookies(function(err, cookies) { + if (err) { + return cb(err); } - prepareRequestOptions() { - const reqOptions = { - url: this.msiEndpoint, - headers: { - "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", - "Metadata": "true" - }, - method: this.httpMethod, - queryParameters: { - "api-version": this.apiVersion, - "resource": this.resource, - "object_id": this.objectId, - "client_id": this.clientId, - "mi_res_id": this.identityId - } - }; - const webResource = new ms_rest_js_1.WebResource(); - return webResource.prepare(reqOptions); + + if (cookies.length === 0) { + return cb(null); } -} -exports.MSIVmTokenCredentials = MSIVmTokenCredentials; -//# sourceMappingURL=msiVmTokenCredentials.js.map -/***/ }), + var completedCount = 0; + var removeErrors = []; -/***/ 5558: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + function removeCookieCb(removeErr) { + if (removeErr) { + removeErrors.push(removeErr); + } -"use strict"; + completedCount++; -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); + if (completedCount === cookies.length) { + return cb(removeErrors.length ? 
removeErrors[0] : null); + } + } + + cookies.forEach(function(cookie) { + store.removeCookie(cookie.domain, cookie.path, cookie.key, removeCookieCb); }); + }); }; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const ms_rest_js_1 = __nccwpck_require__(30812); -const ms_rest_azure_env_1 = __nccwpck_require__(40787); -const adal_node_1 = __nccwpck_require__(35894); -class TokenCredentialsBase { - constructor(clientId, domain, tokenAudience, environment = ms_rest_azure_env_1.Environment.AzureCloud, tokenCache = new adal_node_1.MemoryCache()) { - this.clientId = clientId; - this.domain = domain; - this.tokenAudience = tokenAudience; - this.environment = environment; - this.tokenCache = tokenCache; - if (!clientId || typeof clientId.valueOf() !== "string") { - throw new Error("clientId must be a non empty string."); - } - if (!domain || typeof domain.valueOf() !== "string") { - throw new Error("domain must be a non empty string."); - } - if (this.tokenAudience === "graph" && this.domain.toLowerCase() === "common") { - throw new Error(`${"If the tokenAudience is specified as \"graph\" then \"domain\" cannot be defaulted to \"commmon\" tenant.\ - It must be the actual tenant (preferrably a string in a guid format)."}`); - } - const authorityUrl = this.environment.activeDirectoryEndpointUrl + this.domain; - this.authContext = new adal_node_1.AuthenticationContext(authorityUrl, this.environment.validateAuthority, this.tokenCache); - } - getActiveDirectoryResourceId() { - let resource = this.environment.activeDirectoryResourceId; - if (this.tokenAudience) { - resource = this.tokenAudience; - if (this.tokenAudience.toLowerCase() === "graph") { - resource = this.environment.activeDirectoryGraphResourceId; - } - else if (this.tokenAudience.toLowerCase() === "batch") { - resource = this.environment.batchResourceId; - } - } - return resource; - } - getTokenFromCache(username) { - const self = this; - const resource = this.getActiveDirectoryResourceId(); - return new Promise((resolve, reject) => { - self.authContext.acquireToken(resource, username, self.clientId, (error, tokenResponse) => { - if (error) { - return reject(error); - } - if (tokenResponse.error || tokenResponse.errorDescription) { - return reject(tokenResponse); - } - return resolve(tokenResponse); - }); - }); + +CookieJar.prototype._cloneSync = syncWrap('clone'); +CookieJar.prototype.cloneSync = function(newStore) { + if (!newStore.synchronous) { + throw new Error('CookieJar clone destination store is not synchronous; use async API instead.'); + } + return this._cloneSync(newStore); +}; + +// Use a closure to provide a true imperative API for synchronous stores. +function syncWrap(method) { + return function() { + if (!this.store.synchronous) { + throw new Error('CookieJar store is not synchronous; use async API instead.'); } - /** - * Signs a request with the Authentication header. - * - * @param {webResource} The WebResource to be signed. - * @param {function(error)} callback The callback function. 
- * @return {undefined} - */ - signRequest(webResource) { - return __awaiter(this, void 0, void 0, function* () { - const tokenResponse = yield this.getToken(); - webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); - return Promise.resolve(webResource); - }); + + var args = Array.prototype.slice.call(arguments); + var syncErr, syncResult; + args.push(function syncCb(err, result) { + syncErr = err; + syncResult = result; + }); + this[method].apply(this, args); + + if (syncErr) { + throw syncErr; } + return syncResult; + }; } -exports.TokenCredentialsBase = TokenCredentialsBase; -//# sourceMappingURL=tokenCredentialsBase.js.map + +// wrap all declared CAN_BE_SYNC methods in the sync wrapper +CAN_BE_SYNC.forEach(function(method) { + CookieJar.prototype[method+'Sync'] = syncWrap(method); +}); + +exports.version = VERSION; +exports.CookieJar = CookieJar; +exports.Cookie = Cookie; +exports.Store = Store; +exports.MemoryCookieStore = MemoryCookieStore; +exports.parseDate = parseDate; +exports.formatDate = formatDate; +exports.parse = parse; +exports.fromJSON = fromJSON; +exports.domainMatch = domainMatch; +exports.defaultPath = defaultPath; +exports.pathMatch = pathMatch; +exports.getPublicSuffix = pubsuffix.getPublicSuffix; +exports.cookieCompare = cookieCompare; +exports.permuteDomain = __nccwpck_require__(91287).permuteDomain; +exports.permutePath = permutePath; +exports.canonicalDomain = canonicalDomain; + /***/ }), -/***/ 14502: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 96868: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. 
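// A minimal usage sketch of the CookieJar serialization and "*Sync" API above,
// assuming the standalone tough-cookie 2.5 package (the same source ncc bundles
// into dist/index.js): the *Sync variants only work against a synchronous
// store, which is exactly what the syncWrap() guard enforces.
var tough = require('tough-cookie');

var jar = new tough.CookieJar(new tough.MemoryCookieStore());

// setCookieSync/getCookieStringSync go through the CAN_BE_SYNC wrappers.
jar.setCookieSync('session=abc123; Path=/; HttpOnly', 'https://example.com/');
console.log(jar.getCookieStringSync('https://example.com/api')); // "session=abc123"

// serializeSync()/deserializeSync() round-trip a jar through plain JSON,
// mirroring CookieJar.deserializeSync shown above.
var snapshot = jar.serializeSync();
var restored = tough.CookieJar.deserializeSync(snapshot, new tough.MemoryCookieStore());
console.log(restored.getCookieStringSync('https://example.com/api')); // "session=abc123"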
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { +var Store = __nccwpck_require__(5340)/* .Store */ .y; +var permuteDomain = __nccwpck_require__(91287).permuteDomain; +var pathMatch = __nccwpck_require__(55319)/* .pathMatch */ .U; +var util = __nccwpck_require__(31669); + +function MemoryCookieStore() { + Store.call(this); + this.idx = {}; +} +util.inherits(MemoryCookieStore, Store); +exports.m = MemoryCookieStore; +MemoryCookieStore.prototype.idx = null; + +// Since it's just a struct in RAM, this Store is synchronous +MemoryCookieStore.prototype.synchronous = true; + +// force a default depth: +MemoryCookieStore.prototype.inspect = function() { + return "{ idx: "+util.inspect(this.idx, false, 2)+' }'; +}; + +// Use the new custom inspection symbol to add the custom inspect function if +// available. +if (util.inspect.custom) { + MemoryCookieStore.prototype[util.inspect.custom] = MemoryCookieStore.prototype.inspect; +} + +MemoryCookieStore.prototype.findCookie = function(domain, path, key, cb) { + if (!this.idx[domain]) { + return cb(null,undefined); + } + if (!this.idx[domain][path]) { + return cb(null,undefined); + } + return cb(null,this.idx[domain][path][key]||null); +}; + +MemoryCookieStore.prototype.findCookies = function(domain, path, cb) { + var results = []; + if (!domain) { + return cb(null,[]); + } + + var pathMatcher; + if (!path) { + // null means "all paths" + pathMatcher = function matchAll(domainIndex) { + for (var curPath in domainIndex) { + var pathIndex = domainIndex[curPath]; + for (var key in pathIndex) { + results.push(pathIndex[key]); + } + } + }; + + } else { + pathMatcher = function matchRFC(domainIndex) { + //NOTE: we should use path-match algorithm from S5.1.4 here + //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299) + Object.keys(domainIndex).forEach(function (cookiePath) { + if (pathMatch(path, cookiePath)) { + var pathIndex = domainIndex[cookiePath]; + + for (var key in pathIndex) { + results.push(pathIndex[key]); + } + } + }); + }; + } + + var domains = permuteDomain(domain) || [domain]; + var idx = this.idx; + domains.forEach(function(curDomain) { + var domainIndex = idx[curDomain]; + if (!domainIndex) { + return; + } + pathMatcher(domainIndex); + }); + + cb(null,results); +}; + +MemoryCookieStore.prototype.putCookie = function(cookie, cb) { + if (!this.idx[cookie.domain]) { + this.idx[cookie.domain] = {}; + } + if (!this.idx[cookie.domain][cookie.path]) { + this.idx[cookie.domain][cookie.path] = {}; + } + this.idx[cookie.domain][cookie.path][cookie.key] = cookie; + cb(null); +}; + +MemoryCookieStore.prototype.updateCookie = function(oldCookie, newCookie, cb) { + // updateCookie() may avoid updating cookies that are identical. For example, + // lastAccessed may not be important to some stores and an equality + // comparison could exclude that field. 
+ this.putCookie(newCookie,cb); +}; + +MemoryCookieStore.prototype.removeCookie = function(domain, path, key, cb) { + if (this.idx[domain] && this.idx[domain][path] && this.idx[domain][path][key]) { + delete this.idx[domain][path][key]; + } + cb(null); +}; + +MemoryCookieStore.prototype.removeCookies = function(domain, path, cb) { + if (this.idx[domain]) { + if (path) { + delete this.idx[domain][path]; + } else { + delete this.idx[domain]; + } + } + return cb(null); +}; + +MemoryCookieStore.prototype.removeAllCookies = function(cb) { + this.idx = {}; + return cb(null); +} + +MemoryCookieStore.prototype.getAllCookies = function(cb) { + var cookies = []; + var idx = this.idx; + + var domains = Object.keys(idx); + domains.forEach(function(domain) { + var paths = Object.keys(idx[domain]); + paths.forEach(function(path) { + var keys = Object.keys(idx[domain][path]); + keys.forEach(function(key) { + if (key !== null) { + cookies.push(idx[domain][path][key]); + } + }); + }); + }); + + // Sort by creationIndex so deserializing retains the creation order. + // When implementing your own store, this SHOULD retain the order too + cookies.sort(function(a,b) { + return (a.creationIndex||0) - (b.creationIndex||0); + }); + + cb(null, cookies); +}; + + +/***/ }), + +/***/ 55319: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * "A request-path path-matches a given cookie-path if at least one of the + * following conditions holds:" + */ +function pathMatch (reqPath, cookiePath) { + // "o The cookie-path and the request-path are identical." + if (cookiePath === reqPath) { + return true; + } + + var idx = reqPath.indexOf(cookiePath); + if (idx === 0) { + // "o The cookie-path is a prefix of the request-path, and the last + // character of the cookie-path is %x2F ("/")." 
+ if (cookiePath.substr(-1) === "/") { + return true; + } + + // " o The cookie-path is a prefix of the request-path, and the first + // character of the request-path that is not included in the cookie- path + // is a %x2F ("/") character." + if (reqPath.substr(cookiePath.length, 1) === "/") { + return true; + } + } + + return false; +} + +exports.U = pathMatch; + + +/***/ }), + +/***/ 91287: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +var pubsuffix = __nccwpck_require__(23005); + +// Gives the permutation of all possible domainMatch()es of a given domain. The +// array is in shortest-to-longest order. Handy for indexing. +function permuteDomain (domain) { + var pubSuf = pubsuffix.getPublicSuffix(domain); + if (!pubSuf) { + return null; + } + if (pubSuf == domain) { + return [domain]; + } + + var prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com" + var parts = prefix.split('.').reverse(); + var cur = pubSuf; + var permutations = [cur]; + while (parts.length) { + cur = parts.shift() + '.' + cur; + permutations.push(cur); + } + return permutations; +} + +exports.permuteDomain = permuteDomain; + + +/***/ }), + +/***/ 23005: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +/*! + * Copyright (c) 2018, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. 
Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +var psl = __nccwpck_require__(29975); + +function getPublicSuffix(domain) { + return psl.get(domain); +} + +exports.getPublicSuffix = getPublicSuffix; + + +/***/ }), + +/***/ 5340: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +/*jshint unused:false */ + +function Store() { +} +exports.y = Store; + +// Stores may be synchronous, but are still required to use a +// Continuation-Passing Style API. The CookieJar itself will expose a "*Sync" +// API that converts from synchronous-callbacks to imperative style. 
+Store.prototype.synchronous = false; + +Store.prototype.findCookie = function(domain, path, key, cb) { + throw new Error('findCookie is not implemented'); +}; + +Store.prototype.findCookies = function(domain, path, cb) { + throw new Error('findCookies is not implemented'); +}; + +Store.prototype.putCookie = function(cookie, cb) { + throw new Error('putCookie is not implemented'); +}; + +Store.prototype.updateCookie = function(oldCookie, newCookie, cb) { + // recommended default implementation: + // return this.putCookie(newCookie, cb); + throw new Error('updateCookie is not implemented'); +}; + +Store.prototype.removeCookie = function(domain, path, key, cb) { + throw new Error('removeCookie is not implemented'); +}; + +Store.prototype.removeCookies = function(domain, path, cb) { + throw new Error('removeCookies is not implemented'); +}; + +Store.prototype.removeAllCookies = function(cb) { + throw new Error('removeAllCookies is not implemented'); +} + +Store.prototype.getAllCookies = function(cb) { + throw new Error('getAllCookies is not implemented (therefore jar cannot be serialized)'); +}; + + +/***/ }), + +/***/ 88263: +/***/ ((module) => { + +// generated by genversion +module.exports = '2.5.0' + + +/***/ }), + +/***/ 64305: +/***/ ((module) => { + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} + +module.exports = bytesToUuid; + + +/***/ }), + +/***/ 7968: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = __nccwpck_require__(76417); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; + + +/***/ }), + +/***/ 20621: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(7968); +var bytesToUuid = __nccwpck_require__(64305); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; + + +/***/ }), + +/***/ 2265: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
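// A short sketch of the cookie path/domain matching helpers defined above,
// assuming the standalone tough-cookie 2.5 package, which re-exports them as
// pathMatch() and permuteDomain().
var tough = require('tough-cookie');

// permuteDomain() lists every domain a cookie could be indexed under,
// shortest-to-longest, stopping at the public suffix (resolved via psl).
console.log(tough.permuteDomain('www.blog.example.com'));
// -> [ 'example.com', 'blog.example.com', 'www.blog.example.com' ]

// pathMatch() follows RFC 6265 S5.1.4: an exact match, a "/"-terminated
// cookie-path prefix, or a prefix where the next request-path char is "/".
console.log(tough.pathMatch('/api/users', '/api')); // true
console.log(tough.pathMatch('/apiary', '/api'));    // false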
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } @@ -8395,86 +8501,180 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -const tokenCredentialsBase_1 = __nccwpck_require__(5558); -class UserTokenCredentials extends tokenCredentialsBase_1.TokenCredentialsBase { +const fs_1 = __nccwpck_require__(35747); +const crypto_1 = __nccwpck_require__(76417); +const applicationTokenCredentialsBase_1 = __nccwpck_require__(60968); +const authConstants_1 = __nccwpck_require__(26700); +class ApplicationTokenCertificateCredentials extends applicationTokenCredentialsBase_1.ApplicationTokenCredentialsBase { /** - * Creates a new UserTokenCredentials object. - * + * Creates a new ApplicationTokenCredentials object. + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for detailed instructions on creating an Azure Active Directory application. * @constructor * @param {string} clientId The active directory application client id. - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. * @param {string} domain The domain or tenant id containing this application. - * @param {string} username The user name for the Organization Id account. - * @param {string} password The password for the Organization Id account. + * @param {string} certificate A PEM encoded certificate private key. + * @param {string} thumbprint A hex encoded thumbprint of the certificate. * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). * @param {Environment} [environment] The azure environment to authenticate with. * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. 
*/ - constructor(clientId, domain, username, password, tokenAudience, environment, tokenCache) { - if (!clientId || typeof clientId.valueOf() !== "string") { - throw new Error("clientId must be a non empty string."); + constructor(clientId, domain, certificate, thumbprint, tokenAudience, environment, tokenCache) { + if (!certificate || typeof certificate.valueOf() !== "string") { + throw new Error("certificate must be a non empty string."); } - if (!domain || typeof domain.valueOf() !== "string") { - throw new Error("domain must be a non empty string."); + if (!thumbprint || typeof thumbprint.valueOf() !== "string") { + throw new Error("thumbprint must be a non empty string."); } - if (!username || typeof username.valueOf() !== "string") { - throw new Error("username must be a non empty string."); + super(clientId, domain, tokenAudience, environment, tokenCache); + this.certificate = certificate; + this.thumbprint = thumbprint; + } + /** + * Tries to get the token from cache initially. If that is unsuccessfull then it tries to get the token from ADAL. + * @returns {Promise} A promise that resolves to TokenResponse and rejects with an Error. + */ + getToken() { + return __awaiter(this, void 0, void 0, function* () { + try { + const tokenResponse = yield this.getTokenFromCache(); + return tokenResponse; + } + catch (error) { + if (error.message.startsWith(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR)) { + return Promise.reject(error); + } + return new Promise((resolve, reject) => { + const resource = this.getActiveDirectoryResourceId(); + this.authContext.acquireTokenWithClientCertificate(resource, this.clientId, this.certificate, this.thumbprint, (error, tokenResponse) => { + if (error) { + return reject(error); + } + if (tokenResponse.error || tokenResponse.errorDescription) { + return reject(tokenResponse); + } + return resolve(tokenResponse); + }); + }); + } + }); + } + /** + * Creates a new instance of ApplicationTokenCertificateCredentials. + * + * @param clientId The active directory application client id also known as the SPN (ServicePrincipal Name). + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. + * @param {string} certificateStringOrFilePath A PEM encoded certificate and private key OR an absolute filepath to the .pem file containing that information. For example: + * - CertificateString: "-----BEGIN PRIVATE KEY-----\n\n-----END PRIVATE KEY-----\n-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----\n" + * - CertificateFilePath: **Absolute** file path of the .pem file. + * @param domain The domain or tenant id containing this application. + * @param options AzureTokenCredentialsOptions - Object representing optional parameters. 
+ * + * @returns ApplicationTokenCertificateCredentials + */ + static create(clientId, certificateStringOrFilePath, domain, options) { + if (!certificateStringOrFilePath || + typeof certificateStringOrFilePath.valueOf() !== "string") { + throw new Error("'certificateStringOrFilePath' must be a non empty string."); } - if (!password || typeof password.valueOf() !== "string") { - throw new Error("password must be a non empty string."); + if (!certificateStringOrFilePath.startsWith("-----BEGIN")) { + certificateStringOrFilePath = fs_1.readFileSync(certificateStringOrFilePath, "utf8"); } - super(clientId, domain, tokenAudience, environment, tokenCache); - this.username = username; - this.password = password; + const certificatePattern = /(-+BEGIN CERTIFICATE-+)(\n\r?|\r\n?)([A-Za-z0-9\+\/\n\r]+\=*)(\n\r?|\r\n?)(-+END CERTIFICATE-+)/; + const matchCert = certificateStringOrFilePath.match(certificatePattern); + const rawCertificate = matchCert ? matchCert[3] : ""; + if (!rawCertificate) { + throw new Error("Unable to correctly parse the certificate from the value provided in 'certificateStringOrFilePath' "); + } + const thumbprint = crypto_1.createHash("sha1") + .update(Buffer.from(rawCertificate, "base64")) + .digest("hex"); + return new ApplicationTokenCertificateCredentials(clientId, domain, certificateStringOrFilePath, thumbprint, options.tokenAudience, options.environment, options.tokenCache); } - crossCheckUserNameWithToken(username, userIdFromToken) { - // to maintain the casing consistency between "azureprofile.json" and token cache. (RD 1996587) - // use the "userId" here, which should be the same with "username" except the casing. - return (username.toLowerCase() === userIdFromToken.toLowerCase()); +} +exports.ApplicationTokenCertificateCredentials = ApplicationTokenCertificateCredentials; +//# sourceMappingURL=applicationTokenCertificateCredentials.js.map + +/***/ }), + +/***/ 35216: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const applicationTokenCredentialsBase_1 = __nccwpck_require__(60968); +const authConstants_1 = __nccwpck_require__(26700); +class ApplicationTokenCredentials extends applicationTokenCredentialsBase_1.ApplicationTokenCredentialsBase { + /** + * Creates a new ApplicationTokenCredentials object. + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for detailed instructions on creating an Azure Active Directory application. + * @constructor + * @param {string} clientId The active directory application client id. + * @param {string} domain The domain or tenant id containing this application. 
+ * @param {string} secret The authentication secret for the application. + * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {Environment} [environment] The azure environment to authenticate with. + * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. + */ + constructor(clientId, domain, secret, tokenAudience, environment, tokenCache) { + if (!secret || typeof secret.valueOf() !== "string") { + throw new Error("secret must be a non empty string."); + } + super(clientId, domain, tokenAudience, environment, tokenCache); + this.secret = secret; } /** - * Tries to get the token from cache initially. If that is unsuccessful then it tries to get the token from ADAL. - * @returns {Promise} - * {object} [tokenResponse] The tokenResponse (tokenType and accessToken are the two important properties). - * @memberof UserTokenCredentials + * Tries to get the token from cache initially. If that is unsuccessfull then it tries to get the token from ADAL. + * @returns {Promise} A promise that resolves to TokenResponse and rejects with an Error. */ getToken() { return __awaiter(this, void 0, void 0, function* () { try { - return yield this.getTokenFromCache(this.username); + const tokenResponse = yield this.getTokenFromCache(); + return tokenResponse; } catch (error) { - const self = this; + if (error.message && + error.message.startsWith(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR)) { + return Promise.reject(error); + } const resource = this.getActiveDirectoryResourceId(); return new Promise((resolve, reject) => { - self.authContext.acquireTokenWithUsernamePassword(resource, self.username, self.password, self.clientId, (error, tokenResponse) => { + this.authContext.acquireTokenWithClientCredentials(resource, this.clientId, this.secret, (error, tokenResponse) => { if (error) { return reject(error); } if (tokenResponse.error || tokenResponse.errorDescription) { return reject(tokenResponse); } - tokenResponse = tokenResponse; - if (self.crossCheckUserNameWithToken(self.username, tokenResponse.userId)) { - return resolve(tokenResponse); - } - else { - return reject(`The userId "${tokenResponse.userId}" in access token doesn"t match the username "${self.username}" provided during authentication.`); - } + return resolve(tokenResponse); }); }); } }); } } -exports.UserTokenCredentials = UserTokenCredentials; -//# sourceMappingURL=userTokenCredentials.js.map +exports.ApplicationTokenCredentials = ApplicationTokenCredentials; +//# sourceMappingURL=applicationTokenCredentials.js.map /***/ }), -/***/ 29325: +/***/ 60968: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -8490,175 +8690,1175 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -const adal = __nccwpck_require__(35894); -const msRest = __nccwpck_require__(30812); -const child_process_1 = __nccwpck_require__(63129); -const fs_1 = __nccwpck_require__(35747); -const ms_rest_azure_env_1 = __nccwpck_require__(40787); -const applicationTokenCredentials_1 = __nccwpck_require__(35216); -const applicationTokenCertificateCredentials_1 = __nccwpck_require__(2265); -const 
deviceTokenCredentials_1 = __nccwpck_require__(80450); -const userTokenCredentials_1 = __nccwpck_require__(14502); +const tokenCredentialsBase_1 = __nccwpck_require__(5558); const authConstants_1 = __nccwpck_require__(26700); -const subscriptionUtils_1 = __nccwpck_require__(47813); -const msiVmTokenCredentials_1 = __nccwpck_require__(73287); -const msiAppServiceTokenCredentials_1 = __nccwpck_require__(68376); -/** - * @constant {Array} managementPlaneTokenAudiences - Urls for management plane token - * audience across different azure environments. - */ -const managementPlaneTokenAudiences = [ - "https://management.core.windows.net/", - "https://management.core.chinacloudapi.cn/", - "https://management.core.usgovcloudapi.net/", - "https://management.core.cloudapi.de/", - "https://management.azure.com/", - "https://management.core.windows.net", - "https://management.core.chinacloudapi.cn", - "https://management.core.usgovcloudapi.net", - "https://management.core.cloudapi.de", - "https://management.azure.com" -]; -function turnOnLogging() { - const log = adal.Logging; - log.setLoggingOptions({ - level: 3, - log: function (level, message, error) { - level; - console.info(message); - if (error) { - console.error(error); +class ApplicationTokenCredentialsBase extends tokenCredentialsBase_1.TokenCredentialsBase { + /** + * Creates a new ApplicationTokenCredentials object. + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for detailed instructions on creating an Azure Active Directory application. + * @constructor + * @param {string} clientId The active directory application client id. + * @param {string} domain The domain or tenant id containing this application. + * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {Environment} [environment] The azure environment to authenticate with. + * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. + */ + constructor(clientId, domain, tokenAudience, environment, tokenCache) { + super(clientId, domain, tokenAudience, environment, tokenCache); + } + getTokenFromCache() { + const _super = Object.create(null, { + getTokenFromCache: { get: () => super.getTokenFromCache } + }); + return __awaiter(this, void 0, void 0, function* () { + const self = this; + // a thin wrapper over the base implementation. try get token from cache, additionaly clean up cache if required. + try { + const tokenResponse = yield _super.getTokenFromCache.call(this, undefined); + return Promise.resolve(tokenResponse); } - } - }); -} -if (process.env["AZURE_ADAL_LOGGING_ENABLED"]) { - turnOnLogging(); -} -/** - * Provides a UserTokenCredentials object and the list of subscriptions associated with that userId across all the applicable tenants. - * This method is applicable only for organizational ids that are not 2FA enabled otherwise please use interactive login. - * - * @param {string} username The user name for the Organization Id account. - * @param {string} password The password for the Organization Id account. - * @param {object} [options] Object representing optional parameters. 
- * @param {string} [options.clientId] The active directory application client id. - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {string} [options.domain] The domain or tenant id containing this application. Default value "common". - * @param {Environment} [options.environment] The azure environment to authenticate with. - * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. - * - * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. - */ -function withUsernamePasswordWithAuthResponse(username, password, options) { - return __awaiter(this, void 0, void 0, function* () { - if (!options) { - options = {}; - } - if (!options.clientId) { - options.clientId = authConstants_1.AuthConstants.DEFAULT_ADAL_CLIENT_ID; - } - if (!options.domain) { - options.domain = authConstants_1.AuthConstants.AAD_COMMON_TENANT; - } - if (!options.environment) { - options.environment = ms_rest_azure_env_1.Environment.AzureCloud; - } - let creds; - let tenantList = []; - let subscriptionList = []; - try { - creds = new userTokenCredentials_1.UserTokenCredentials(options.clientId, options.domain, username, password, options.tokenAudience, options.environment); - yield creds.getToken(); - // The token cache gets propulated for all the tenants as a part of building the tenantList. - tenantList = yield subscriptionUtils_1.buildTenantList(creds); - subscriptionList = yield _getSubscriptions(creds, tenantList, options.tokenAudience); - } - catch (err) { - return Promise.reject(err); - } - return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); - }); + catch (error) { + // Remove the stale token from the tokencache. ADAL gives the same error message "Entry not found in cache." + // for entry not being present in the cache and for accessToken being expired in the cache. We do not want the token cache + // to contain the expired token, we clean it up here. + const status = yield self.removeInvalidItemsFromCache({ + _clientId: self.clientId + }); + if (status.result) { + return Promise.reject(error); + } + const message = status && status.details && status.details.message + ? status.details.message + : status.details; + return Promise.reject(new Error(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR + + " : " + + "critical failure while removing expired token for service principal from token cache. " + + message)); + } + }); + } + /** + * Removes invalid items from token cache. This method is different. Here we never reject in case of error. + * Rather we resolve with an object that says the result is false and error information is provided in + * the details property of the resolved object. This is done to do better error handling in the above function + * where removeInvalidItemsFromCache() is called. 
+ * @param {object} query The query to be used for finding the token for service principal from the cache + * @returns {result: boolean, details?: Error} resultObject with more info. + */ + removeInvalidItemsFromCache(query) { + const self = this; + return new Promise(resolve => { + self.tokenCache.find(query, (error, entries) => { + if (error) { + return resolve({ result: false, details: error }); + } + if (entries && entries.length > 0) { + return new Promise(resolve => { + return self.tokenCache.remove(entries, (err) => { + if (err) { + return resolve({ result: false, details: err }); + } + return resolve({ result: true }); + }); + }); + } + else { + return resolve({ result: true }); + } + }); + }); + } } -exports.withUsernamePasswordWithAuthResponse = withUsernamePasswordWithAuthResponse; -/** - * Provides an ApplicationTokenCredentials object and the list of subscriptions associated with that servicePrinicpalId/clientId across all the applicable tenants. - * - * @param {string} clientId The active directory application client id also known as the SPN (ServicePrincipal Name). - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {string} secret The application secret for the service principal. - * @param {string} domain The domain or tenant id containing this application. - * @param {object} [options] Object representing optional parameters. - * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {Environment} [options.environment] The azure environment to authenticate with. - * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. - * - * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. - */ -function withServicePrincipalSecretWithAuthResponse(clientId, secret, domain, options) { - return __awaiter(this, void 0, void 0, function* () { - if (!options) { - options = {}; - } - if (!options.environment) { - options.environment = ms_rest_azure_env_1.Environment.AzureCloud; - } - let creds; - let subscriptionList = []; - try { - creds = new applicationTokenCredentials_1.ApplicationTokenCredentials(clientId, domain, secret, options.tokenAudience, options.environment); - yield creds.getToken(); - subscriptionList = yield _getSubscriptions(creds, [domain], options.tokenAudience); - } - catch (err) { - return Promise.reject(err); - } - return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); +exports.ApplicationTokenCredentialsBase = ApplicationTokenCredentialsBase; +//# sourceMappingURL=applicationTokenCredentialsBase.js.map + +/***/ }), + +/***/ 38909: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
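// A hedged sketch of the service-principal login flow documented above,
// assuming the standalone @azure/ms-rest-nodeauth package (the source ncc
// bundles here) and its loginWithServicePrincipalSecretWithAuthResponse
// export, which wraps withServicePrincipalSecretWithAuthResponse; the
// AZURE_* environment variables are placeholders.
const msRestNodeAuth = require('@azure/ms-rest-nodeauth');

async function main() {
  // Resolves to an AuthResponse: { credentials, subscriptions }, as the
  // JSDoc above describes.
  const authResponse = await msRestNodeAuth.loginWithServicePrincipalSecretWithAuthResponse(
    process.env.AZURE_CLIENT_ID,
    process.env.AZURE_CLIENT_SECRET,
    process.env.AZURE_TENANT_ID
  );
  console.log(authResponse.subscriptions.map((sub) => sub.name));
}

main().catch((err) => console.error(err));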
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); -} -exports.withServicePrincipalSecretWithAuthResponse = withServicePrincipalSecretWithAuthResponse; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const ms_rest_js_1 = __nccwpck_require__(30812); +const login_1 = __nccwpck_require__(29325); /** - * Provides an ApplicationTokenCertificateCredentials object and the list of subscriptions associated with that servicePrinicpalId/clientId across all the applicable tenants. - * - * @param {string} clientId The active directory application client id also known as the SPN (ServicePrincipal Name). - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {string} certificateStringOrFilePath A PEM encoded certificate and private key OR an absolute filepath to the .pem file containing that information. For example: - * - CertificateString: "-----BEGIN PRIVATE KEY-----\n\n-----END PRIVATE KEY-----\n-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----\n" - * - CertificateFilePath: **Absolute** file path of the .pem file. - * @param {string} domain The domain or tenant id containing this application. - * @param {object} [options] Object representing optional parameters. - * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {Environment} [options.environment] The azure environment to authenticate with. - * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. - * - * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. + * Describes the credentials by retrieving token via Azure CLI. */ -function withServicePrincipalCertificateWithAuthResponse(clientId, certificateStringOrFilePath, domain, options) { - return __awaiter(this, void 0, void 0, function* () { - if (!options) { - options = {}; - } - if (!options.environment) { - options.environment = ms_rest_azure_env_1.Environment.AzureCloud; +class AzureCliCredentials { + constructor(subscriptionInfo, tokenInfo, + // tslint:disable-next-line: no-inferrable-types + resource = "https://management.azure.com") { + /** + * Azure resource endpoints. + * - Defaults to Azure Resource Manager from environment: AzureCloud. 
"https://management.azure.com" + * - For Azure KeyVault: "https://vault.azure.net" + * - For Azure Batch: "https://batch.core.windows.net" + * - For Azure Active Directory Graph: "https://graph.windows.net" + * + * To get the resource for other clouds: + * - `az cloud list` + */ + // tslint:disable-next-line: no-inferrable-types + this.resource = "https://management.azure.com"; + /** + * The number of seconds within which it is good to renew the token. + * A constant set to 270 seconds (4.5 minutes). + */ + this._tokenRenewalMarginInSeconds = 270; + this.subscriptionInfo = subscriptionInfo; + this.tokenInfo = tokenInfo; + this.resource = resource; + } + /** + * Tries to get the new token from Azure CLI, if the token has expired or the subscription has + * changed else uses the cached accessToken. + * @return The tokenResponse (tokenType and accessToken are the two important properties). + */ + getToken() { + return __awaiter(this, void 0, void 0, function* () { + if (this._hasTokenExpired() || this._hasSubscriptionChanged() || this._hasResourceChanged()) { + try { + // refresh the access token + this.tokenInfo = yield AzureCliCredentials.getAccessToken({ + subscriptionIdOrName: this.subscriptionInfo.id, + resource: this.resource + }); + } + catch (err) { + throw new Error(`An error occurred while refreshing the new access ` + + `token:${err.stderr ? err.stderr : err.message}`); + } + } + const result = { + accessToken: this.tokenInfo.accessToken, + tokenType: this.tokenInfo.tokenType, + expiresOn: this.tokenInfo.expiresOn, + tenantId: this.tokenInfo.tenant + }; + return result; + }); + } + /** + * Signs a request with the Authentication header. + * @param The request to be signed. + */ + signRequest(webResource) { + return __awaiter(this, void 0, void 0, function* () { + const tokenResponse = yield this.getToken(); + webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); + return Promise.resolve(webResource); + }); + } + _hasTokenExpired() { + let result = true; + const now = Math.floor(Date.now() / 1000); + if (this.tokenInfo.expiresOn && + this.tokenInfo.expiresOn instanceof Date && + Math.floor(this.tokenInfo.expiresOn.getTime() / 1000) - now > this._tokenRenewalMarginInSeconds) { + result = false; } - let creds; - let subscriptionList = []; + return result; + } + _hasSubscriptionChanged() { + return this.subscriptionInfo.id !== this.tokenInfo.subscription; + } + _parseToken() { try { - creds = applicationTokenCertificateCredentials_1.ApplicationTokenCertificateCredentials.create(clientId, certificateStringOrFilePath, domain, options); - yield creds.getToken(); - subscriptionList = yield _getSubscriptions(creds, [domain], options.tokenAudience); + const base64Url = this.tokenInfo.accessToken.split(".")[1]; + const base64 = decodeURIComponent(Buffer.from(base64Url, "base64").toString("binary").split("").map((c) => { + return "%" + ("00" + c.charCodeAt(0).toString(16)).slice(-2); + }).join("")); + return JSON.parse(base64); } catch (err) { - return Promise.reject(err); + const msg = `An error occurred while parsing the access token: ${err.stack}`; + throw new Error(msg); } - return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); + } + _isAzureResourceManagerEndpoint(newResource, currentResource) { + if (newResource.endsWith("/")) + newResource = newResource.slice(0, -1); + if (currentResource.endsWith("/")) + currentResource = currentResource.slice(0, -1); + return (newResource === 
"https://management.core.windows.net" && + currentResource === "https://management.azure.com") || + (newResource === "https://management.azure.com" && + currentResource === "https://management.core.windows.net"); + } + _hasResourceChanged() { + const parsedToken = this._parseToken(); + // normalize the resource string, since it is possible to + // provide a resource without a trailing slash + const currentResource = parsedToken.aud && parsedToken.aud.endsWith("/") + ? parsedToken.aud.slice(0, -1) + : parsedToken.aud; + const newResource = this.resource.endsWith("/") + ? this.resource.slice(0, -1) + : this.resource; + const result = this._isAzureResourceManagerEndpoint(newResource, currentResource) + ? false + : currentResource !== newResource; + return result; + } + /** + * Gets the access token for the default or specified subscription. + * @param options Optional parameters that can be provided to get the access token. + */ + static getAccessToken(options = {}) { + return __awaiter(this, void 0, void 0, function* () { + try { + let cmd = "account get-access-token"; + if (options.subscriptionIdOrName) { + cmd += ` -s "${options.subscriptionIdOrName}"`; + } + if (options.resource) { + cmd += ` --resource ${options.resource}`; + } + const result = yield login_1.execAz(cmd); + result.expiresOn = new Date(result.expiresOn); + return result; + } + catch (err) { + const message = `An error occurred while getting credentials from ` + + `Azure CLI: ${err.stack}`; + throw new Error(message); + } + }); + } + /** + * Gets the subscription from Azure CLI. + * @param subscriptionIdOrName - The name or id of the subscription for which the information is + * required. + */ + static getSubscription(subscriptionIdOrName) { + return __awaiter(this, void 0, void 0, function* () { + if (subscriptionIdOrName && (typeof subscriptionIdOrName !== "string" || !subscriptionIdOrName.length)) { + throw new Error("'subscriptionIdOrName' must be a non-empty string."); + } + try { + let cmd = "account show"; + if (subscriptionIdOrName) { + cmd += ` -s "${subscriptionIdOrName}"`; + } + const result = yield login_1.execAz(cmd); + return result; + } + catch (err) { + const message = `An error occurred while getting information about the current subscription from ` + + `Azure CLI: ${err.stack}`; + throw new Error(message); + } + }); + } + /** + * Sets the specified subscription as the default subscription for Azure CLI. + * @param subscriptionIdOrName The name or id of the subsciption that needs to be set as the + * default subscription. + */ + static setDefaultSubscription(subscriptionIdOrName) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield login_1.execAz(`account set -s ${subscriptionIdOrName}`); + } + catch (err) { + const message = `An error occurred while setting the current subscription from ` + + `Azure CLI: ${err.stack}`; + throw new Error(message); + } + }); + } + /** + * Returns a list of all the subscriptions from Azure CLI. + * @param options Optional parameters that can be provided while listing all the subcriptions. 
+ */ + static listAllSubscriptions(options = {}) { + return __awaiter(this, void 0, void 0, function* () { + let subscriptionList = []; + try { + let cmd = "account list"; + if (options.all) { + cmd += " --all"; + } + if (options.refresh) { + cmd += "--refresh"; + } + subscriptionList = yield login_1.execAz(cmd); + if (subscriptionList && subscriptionList.length) { + for (const sub of subscriptionList) { + if (sub.cloudName) { + sub.environmentName = sub.cloudName; + delete sub.cloudName; + } + } + } + return subscriptionList; + } + catch (err) { + const message = `An error occurred while getting a list of all the subscription from ` + + `Azure CLI: ${err.stack}`; + throw new Error(message); + } + }); + } + /** + * Provides credentials that can be used by the JS SDK to interact with Azure via azure cli. + * **Pre-requisite** + * - **install azure-cli** . For more information see + * {@link https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest Install Azure CLI} + * - **login via `az login`** + * @param options - Optional parameters that can be provided while creating AzureCliCredentials. + */ + static create(options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const [subscriptinInfo, accessToken] = yield Promise.all([ + AzureCliCredentials.getSubscription(options.subscriptionIdOrName), + AzureCliCredentials.getAccessToken(options) + ]); + return new AzureCliCredentials(subscriptinInfo, accessToken, options.resource); + }); + } +} +exports.AzureCliCredentials = AzureCliCredentials; +//# sourceMappingURL=azureCliCredentials.js.map + +/***/ }), + +/***/ 80450: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tokenCredentialsBase_1 = __nccwpck_require__(5558); +const authConstants_1 = __nccwpck_require__(26700); +class DeviceTokenCredentials extends tokenCredentialsBase_1.TokenCredentialsBase { + /** + * Creates a new DeviceTokenCredentials object that gets a new access token using userCodeInfo (contains user_code, device_code) + * for authenticating user on device. + * + * When this credential is used, the script will provide a url and code. The user needs to copy the url and the code, paste it + * in a browser and authenticate over there. If successful, the script will get the access token. + * + * @constructor + * @param {string} [clientId] The active directory application client id. + * @param {string} [domain] The domain or tenant id containing this application. Default value is "common" + * @param {string} [username] The user name for account in the form: "user@example.com". + * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. + * @param {Environment} [environment] The azure environment to authenticate with. Default environment is "Azure" popularly known as "Public Azure Cloud". 
+ * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. + */ + constructor(clientId, domain, username, tokenAudience, environment, tokenCache) { + if (!username) { + username = "user@example.com"; + } + if (!domain) { + domain = authConstants_1.AuthConstants.AAD_COMMON_TENANT; + } + if (!clientId) { + clientId = authConstants_1.AuthConstants.DEFAULT_ADAL_CLIENT_ID; + } + super(clientId, domain, tokenAudience, environment, tokenCache); + this.username = username; + } + getToken() { + // For device auth, this is just getTokenFromCache. + return this.getTokenFromCache(this.username); + } +} +exports.DeviceTokenCredentials = DeviceTokenCredentials; +//# sourceMappingURL=deviceTokenCredentials.js.map + +/***/ }), + +/***/ 23699: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +Object.defineProperty(exports, "__esModule", ({ value: true })); +const applicationTokenCredentials_1 = __nccwpck_require__(35216); +const applicationTokenCertificateCredentials_1 = __nccwpck_require__(2265); +const deviceTokenCredentials_1 = __nccwpck_require__(80450); +const msiAppServiceTokenCredentials_1 = __nccwpck_require__(68376); +const msiTokenCredentials_1 = __nccwpck_require__(6182); +const msiVmTokenCredentials_1 = __nccwpck_require__(73287); +const tokenCredentialsBase_1 = __nccwpck_require__(5558); +const userTokenCredentials_1 = __nccwpck_require__(14502); +const adal_node_1 = __nccwpck_require__(35894); +function createAuthenticator(credentials) { + const convertedCredentials = _convert(credentials); + const authenticator = _createAuthenticatorMapper(convertedCredentials); + return authenticator; +} +exports.createAuthenticator = createAuthenticator; +function _convert(credentials) { + if (credentials instanceof msiAppServiceTokenCredentials_1.MSIAppServiceTokenCredentials) { + return new msiAppServiceTokenCredentials_1.MSIAppServiceTokenCredentials({ + msiEndpoint: credentials.msiEndpoint, + msiSecret: credentials.msiSecret, + msiApiVersion: credentials.msiApiVersion, + resource: credentials.resource + }); + } + else if (credentials instanceof msiVmTokenCredentials_1.MSIVmTokenCredentials) { + return new msiVmTokenCredentials_1.MSIVmTokenCredentials({ + resource: credentials.resource, + msiEndpoint: credentials.msiEndpoint + }); + } + else if (credentials instanceof msiTokenCredentials_1.MSITokenCredentials) { + throw new Error("MSI-credentials not one of: MSIVmTokenCredentials, MSIAppServiceTokenCredentials"); + } + else { + return credentials; + } +} +function _createAuthenticatorMapper(credentials) { + return (challenge) => new Promise((resolve, reject) => { + // Function to take token Response and format a authorization value + const _formAuthorizationValue = (err, tokenResponse) => { + if (err) { + return reject(err); + } + if (tokenResponse.error) { + return reject(tokenResponse.error); + } + tokenResponse = tokenResponse; + // Calculate the value to be set in the request's Authorization header and resume the call. + const authorizationValue = tokenResponse.tokenType + " " + tokenResponse.accessToken; + return resolve(authorizationValue); + }; + // Create a new authentication context. 
+ if (credentials instanceof tokenCredentialsBase_1.TokenCredentialsBase) { + const context = new adal_node_1.AuthenticationContext(challenge.authorization, true, credentials.authContext && credentials.authContext.cache); + if (credentials instanceof applicationTokenCredentials_1.ApplicationTokenCredentials) { + return context.acquireTokenWithClientCredentials(challenge.resource, credentials.clientId, credentials.secret, _formAuthorizationValue); + } + else if (credentials instanceof applicationTokenCertificateCredentials_1.ApplicationTokenCertificateCredentials) { + return context.acquireTokenWithClientCertificate(challenge.resource, credentials.clientId, credentials.certificate, credentials.thumbprint, _formAuthorizationValue); + } + else if (credentials instanceof userTokenCredentials_1.UserTokenCredentials) { + return context.acquireTokenWithUsernamePassword(challenge.resource, credentials.username, credentials.password, credentials.clientId, _formAuthorizationValue); + } + else if (credentials instanceof deviceTokenCredentials_1.DeviceTokenCredentials) { + return context.acquireToken(challenge.resource, credentials.username, credentials.clientId, _formAuthorizationValue); + } + } + else if (credentials instanceof msiTokenCredentials_1.MSITokenCredentials) { + return credentials.getToken(); + } + else { + return reject(new Error("credentials must be one of: ApplicationTokenCredentials, UserTokenCredentials, " + + "DeviceTokenCredentials, MSITokenCredentials")); + } + }); +} +//# sourceMappingURL=keyVaultFactory.js.map + +/***/ }), + +/***/ 68376: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const msiTokenCredentials_1 = __nccwpck_require__(6182); +const ms_rest_js_1 = __nccwpck_require__(30812); +/** + * @class MSIAppServiceTokenCredentials + */ +class MSIAppServiceTokenCredentials extends msiTokenCredentials_1.MSITokenCredentials { + /** + * Creates an instance of MSIAppServiceTokenCredentials. + * @param {string} [options.msiEndpoint] - The local URL from which your app can request tokens. + * Either provide this parameter or set the environment variable `MSI_ENDPOINT`. + * For example: `MSI_ENDPOINT="http://127.0.0.1:41741/MSI/token/"` + * @param {string} [options.msiSecret] - The secret used in communication between your code and the local MSI agent. + * Either provide this parameter or set the environment variable `MSI_SECRET`. + * For example: `MSI_SECRET="69418689F1E342DD946CB82994CDA3CB"` + * @param {string} [options.resource] - The resource uri or token audience for which the token is needed. + * For e.g. 
it can be: + * - resource management endpoint "https://management.azure.com/" (default) + * - management endpoint "https://management.core.windows.net/" + * @param {string} [options.msiApiVersion] - The api-version of the local MSI agent. Default value is "2017-09-01". + */ + constructor(options) { + if (!options) + options = {}; + super(options); + options.msiEndpoint = options.msiEndpoint || process.env["MSI_ENDPOINT"]; + options.msiSecret = options.msiSecret || process.env["MSI_SECRET"]; + if (!options.msiEndpoint || (options.msiEndpoint && typeof options.msiEndpoint.valueOf() !== "string")) { + throw new Error('Either provide "msiEndpoint" as a property of the "options" object ' + + 'or set the environment variable "MSI_ENDPOINT" and it must be of type "string".'); + } + if (!options.msiSecret || (options.msiSecret && typeof options.msiSecret.valueOf() !== "string")) { + throw new Error('Either provide "msiSecret" as a property of the "options" object ' + + 'or set the environment variable "MSI_SECRET" and it must be of type "string".'); + } + if (!options.msiApiVersion) { + options.msiApiVersion = "2017-09-01"; + } + else if (typeof options.msiApiVersion.valueOf() !== "string") { + throw new Error("msiApiVersion must be a uri."); + } + this.msiEndpoint = options.msiEndpoint; + this.msiSecret = options.msiSecret; + this.msiApiVersion = options.msiApiVersion; + } + /** + * Prepares and sends a GET request to a service endpoint indicated by the app service, which responds with the access token. + * @return {Promise} Promise with the tokenResponse (tokenType and accessToken are the two important properties). + */ + getToken() { + return __awaiter(this, void 0, void 0, function* () { + const reqOptions = this.prepareRequestOptions(); + let opRes; + let result; + opRes = yield this._httpClient.sendRequest(reqOptions); + if (opRes.bodyAsText === undefined || opRes.bodyAsText.indexOf("ExceptionMessage") !== -1) { + throw new Error(`MSI: Failed to retrieve a token from "${reqOptions.url}" with an error: ${opRes.bodyAsText}`); + } + result = this.parseTokenResponse(opRes.bodyAsText); + if (!result.tokenType) { + throw new Error(`Invalid token response, did not find tokenType. Response body is: ${opRes.bodyAsText}`); + } + else if (!result.accessToken) { + throw new Error(`Invalid token response, did not find accessToken. Response body is: ${opRes.bodyAsText}`); + } + return result; + }); + } + prepareRequestOptions() { + const endpoint = this.msiEndpoint.endsWith("/") ? this.msiEndpoint : `${this.msiEndpoint}/`; + const resource = encodeURIComponent(this.resource); + const getUrl = `${endpoint}?resource=${resource}&api-version=${this.msiApiVersion}`; + const reqOptions = { + url: getUrl, + headers: { + "secret": this.msiSecret + }, + method: "GET" + }; + const webResource = new ms_rest_js_1.WebResource(); + return webResource.prepare(reqOptions); + } +} +exports.MSIAppServiceTokenCredentials = MSIAppServiceTokenCredentials; +//# sourceMappingURL=msiAppServiceTokenCredentials.js.map + +/***/ }), + +/***/ 6182: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
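// --- Illustrative sketch (editor's note, not part of the bundled diff) ---
// A minimal usage sketch for the MSIAppServiceTokenCredentials class shown
// above, assuming the un-bundled @azure/ms-rest-nodeauth package is available;
// the endpoint and secret below are placeholders that App Service normally
// supplies through the MSI_ENDPOINT / MSI_SECRET environment variables.
const { MSIAppServiceTokenCredentials } = require("@azure/ms-rest-nodeauth");

async function getAppServiceMsiToken() {
  const creds = new MSIAppServiceTokenCredentials({
    msiEndpoint: "http://127.0.0.1:41741/MSI/token/", // placeholder; falls back to process.env.MSI_ENDPOINT
    msiSecret: "<msi-secret>",                        // placeholder; falls back to process.env.MSI_SECRET
    resource: "https://management.azure.com/",        // default token audience
  });
  // Issues a GET to "<msiEndpoint>?resource=...&api-version=2017-09-01" with the secret header
  const { tokenType, accessToken } = await creds.getToken();
  return `${tokenType} ${accessToken}`;
}
// --- End editor's note ---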
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const ms_rest_js_1 = __nccwpck_require__(30812); +const authConstants_1 = __nccwpck_require__(26700); +/** + * @class MSITokenCredentials - Provides information about managed service identity token credentials. + * This object can only be used to acquire token on a virtual machine provisioned in Azure with managed service identity. + */ +class MSITokenCredentials { + /** + * Creates an instance of MSITokenCredentials. + * @param {object} [options] - Optional parameters + * @param {string} [options.resource] - The resource uri or token audience for which the token is needed. + * For e.g. it can be: + * - resource management endpoint "https://management.azure.com/"(default) + * - management endpoint "https://management.core.windows.net/" + */ + constructor(options) { + if (!options) + options = {}; + if (!options.resource) { + options.resource = authConstants_1.AuthConstants.RESOURCE_MANAGER_ENDPOINT; + } + else if (typeof options.resource.valueOf() !== "string") { + throw new Error("resource must be a uri."); + } + this.resource = options.resource; + this._httpClient = options.httpClient || new ms_rest_js_1.DefaultHttpClient(); + } + /** + * Parses a tokenResponse json string into a object, and converts properties on the first level to camelCase. + * This method tries to standardize the tokenResponse + * @param {string} body A json string + * @return {object} [tokenResponse] The tokenResponse (tokenType and accessToken are the two important properties). + */ + parseTokenResponse(body) { + // Docs show different examples of possible MSI responses for different services. https://docs.microsoft.com/en-us/azure/active-directory/managed-service-identity/overview + // expires_on - is a Date like string in this doc + // - https://docs.microsoft.com/en-us/azure/app-service/app-service-managed-service-identity#rest-protocol-examples + // In other doc it is stringified number. 
+ // - https://docs.microsoft.com/en-us/azure/active-directory/managed-service-identity/tutorial-linux-vm-access-arm#get-an-access-token-using-the-vms-identity-and-use-it-to-call-resource-manager + const parsedBody = JSON.parse(body); + parsedBody.accessToken = parsedBody["access_token"]; + delete parsedBody["access_token"]; + parsedBody.tokenType = parsedBody["token_type"]; + delete parsedBody["token_type"]; + if (parsedBody["refresh_token"]) { + parsedBody.refreshToken = parsedBody["refresh_token"]; + delete parsedBody["refresh_token"]; + } + if (parsedBody["expires_in"]) { + parsedBody.expiresIn = parsedBody["expires_in"]; + if (typeof parsedBody["expires_in"] === "string") { + // normal number as a string '1504130527' + parsedBody.expiresIn = parseInt(parsedBody["expires_in"], 10); + } + delete parsedBody["expires_in"]; + } + if (parsedBody["not_before"]) { + parsedBody.notBefore = parsedBody["not_before"]; + if (typeof parsedBody["not_before"] === "string") { + // normal number as a string '1504130527' + parsedBody.notBefore = parseInt(parsedBody["not_before"], 10); + } + delete parsedBody["not_before"]; + } + if (parsedBody["expires_on"]) { + parsedBody.expiresOn = parsedBody["expires_on"]; + if (typeof parsedBody["expires_on"] === "string") { + // possibly a Date string '09/14/2017 00:00:00 PM +00:00' + if (parsedBody["expires_on"].includes(":") || parsedBody["expires_on"].includes("/")) { + parsedBody.expiresOn = new Date(parsedBody["expires_on"], 10); + } + else { + // normal number as a string '1504130527' + parsedBody.expiresOn = new Date(parseInt(parsedBody["expires_on"], 10)); + } + } + delete parsedBody["expires_on"]; + } + return parsedBody; + } + /** + * Signs a request with the Authentication header. + * + * @param {webResource} The WebResource to be signed. + * @return {Promise} Promise with signed WebResource. + */ + signRequest(webResource) { + return __awaiter(this, void 0, void 0, function* () { + const tokenResponse = yield this.getToken(); + webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); + return Promise.resolve(webResource); + }); + } +} +exports.MSITokenCredentials = MSITokenCredentials; +//# sourceMappingURL=msiTokenCredentials.js.map + +/***/ }), + +/***/ 73287: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const msiTokenCredentials_1 = __nccwpck_require__(6182); +const ms_rest_js_1 = __nccwpck_require__(30812); +/** + * @class MSIVmTokenCredentials + */ +class MSIVmTokenCredentials extends msiTokenCredentials_1.MSITokenCredentials { + constructor(options) { + if (!options) + options = {}; + super(options); + if (!options.msiEndpoint) { + options.msiEndpoint = "http://169.254.169.254/metadata/identity/oauth2/token"; + } + else if (typeof options.msiEndpoint !== "string") { + throw new Error("msiEndpoint must be a string."); + } + const urlBuilder = ms_rest_js_1.URLBuilder.parse(options.msiEndpoint); + if (!urlBuilder.getScheme()) { + options.msiEndpoint = `http://${options.msiEndpoint}`; + } + if (!options.apiVersion) { + options.apiVersion = "2018-02-01"; + } + else if (typeof options.apiVersion !== "string") { + throw new Error("apiVersion must be a string."); + } + if (!options.httpMethod) { + options.httpMethod = "GET"; + } + this.apiVersion = options.apiVersion; + this.msiEndpoint = options.msiEndpoint; + this.httpMethod = options.httpMethod; + this.objectId = options.objectId; + this.clientId = options.clientId; + this.identityId = options.identityId; + } + /** + * Prepares and sends a POST request to a service endpoint hosted on the Azure VM, which responds with the access token. + * @return {Promise} Promise with the tokenResponse (tokenType and accessToken are the two important properties). + */ + getToken() { + return __awaiter(this, void 0, void 0, function* () { + const reqOptions = this.prepareRequestOptions(); + let opRes; + let result; + opRes = yield this._httpClient.sendRequest(reqOptions); + result = this.parseTokenResponse(opRes.bodyAsText); + if (!result.tokenType) { + throw new Error(`Invalid token response, did not find tokenType. Response body is: ${opRes.bodyAsText}`); + } + else if (!result.accessToken) { + throw new Error(`Invalid token response, did not find accessToken. Response body is: ${opRes.bodyAsText}`); + } + return result; + }); + } + prepareRequestOptions() { + const reqOptions = { + url: this.msiEndpoint, + headers: { + "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", + "Metadata": "true" + }, + method: this.httpMethod, + queryParameters: { + "api-version": this.apiVersion, + "resource": this.resource, + "object_id": this.objectId, + "client_id": this.clientId, + "mi_res_id": this.identityId + } + }; + const webResource = new ms_rest_js_1.WebResource(); + return webResource.prepare(reqOptions); + } +} +exports.MSIVmTokenCredentials = MSIVmTokenCredentials; +//# sourceMappingURL=msiVmTokenCredentials.js.map + +/***/ }), + +/***/ 5558: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const ms_rest_js_1 = __nccwpck_require__(30812); +const ms_rest_azure_env_1 = __nccwpck_require__(40787); +const adal_node_1 = __nccwpck_require__(35894); +class TokenCredentialsBase { + constructor(clientId, domain, tokenAudience, environment = ms_rest_azure_env_1.Environment.AzureCloud, tokenCache = new adal_node_1.MemoryCache()) { + this.clientId = clientId; + this.domain = domain; + this.tokenAudience = tokenAudience; + this.environment = environment; + this.tokenCache = tokenCache; + if (!clientId || typeof clientId.valueOf() !== "string") { + throw new Error("clientId must be a non empty string."); + } + if (!domain || typeof domain.valueOf() !== "string") { + throw new Error("domain must be a non empty string."); + } + if (this.tokenAudience === "graph" && this.domain.toLowerCase() === "common") { + throw new Error(`${"If the tokenAudience is specified as \"graph\" then \"domain\" cannot be defaulted to \"commmon\" tenant.\ + It must be the actual tenant (preferrably a string in a guid format)."}`); + } + const authorityUrl = this.environment.activeDirectoryEndpointUrl + this.domain; + this.authContext = new adal_node_1.AuthenticationContext(authorityUrl, this.environment.validateAuthority, this.tokenCache); + } + getActiveDirectoryResourceId() { + let resource = this.environment.activeDirectoryResourceId; + if (this.tokenAudience) { + resource = this.tokenAudience; + if (this.tokenAudience.toLowerCase() === "graph") { + resource = this.environment.activeDirectoryGraphResourceId; + } + else if (this.tokenAudience.toLowerCase() === "batch") { + resource = this.environment.batchResourceId; + } + } + return resource; + } + getTokenFromCache(username) { + const self = this; + const resource = this.getActiveDirectoryResourceId(); + return new Promise((resolve, reject) => { + self.authContext.acquireToken(resource, username, self.clientId, (error, tokenResponse) => { + if (error) { + return reject(error); + } + if (tokenResponse.error || tokenResponse.errorDescription) { + return reject(tokenResponse); + } + return resolve(tokenResponse); + }); + }); + } + /** + * Signs a request with the Authentication header. + * + * @param {webResource} The WebResource to be signed. + * @param {function(error)} callback The callback function. + * @return {undefined} + */ + signRequest(webResource) { + return __awaiter(this, void 0, void 0, function* () { + const tokenResponse = yield this.getToken(); + webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); + return Promise.resolve(webResource); + }); + } +} +exports.TokenCredentialsBase = TokenCredentialsBase; +//# sourceMappingURL=tokenCredentialsBase.js.map + +/***/ }), + +/***/ 14502: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
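// --- Illustrative sketch (editor's note, not part of the bundled diff) ---
// How the shared signRequest() helper above is typically exercised: a concrete
// credential subclass (DeviceTokenCredentials, defined earlier in this bundle)
// resolves a cached token and stamps it on a request's Authorization header.
// Assumes the un-bundled @azure/ms-rest-nodeauth and @azure/ms-rest-js
// packages; the subscriptions URL is a placeholder.
const { DeviceTokenCredentials } = require("@azure/ms-rest-nodeauth");
const { WebResource } = require("@azure/ms-rest-js");

async function signedListSubscriptionsRequest() {
  // Defaults: well-known ADAL client id, "common" tenant, "user@example.com";
  // relies on a device-code login having already populated the token cache.
  const creds = new DeviceTokenCredentials();
  const req = new WebResource(
    "https://management.azure.com/subscriptions?api-version=2019-06-01",
    "GET"
  );
  // signRequest() calls getToken() and sets "authorization: <tokenType> <accessToken>"
  return creds.signRequest(req);
}
// --- End editor's note ---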
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tokenCredentialsBase_1 = __nccwpck_require__(5558); +class UserTokenCredentials extends tokenCredentialsBase_1.TokenCredentialsBase { + /** + * Creates a new UserTokenCredentials object. + * + * @constructor + * @param {string} clientId The active directory application client id. + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. + * @param {string} domain The domain or tenant id containing this application. + * @param {string} username The user name for the Organization Id account. + * @param {string} password The password for the Organization Id account. + * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {Environment} [environment] The azure environment to authenticate with. + * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. + */ + constructor(clientId, domain, username, password, tokenAudience, environment, tokenCache) { + if (!clientId || typeof clientId.valueOf() !== "string") { + throw new Error("clientId must be a non empty string."); + } + if (!domain || typeof domain.valueOf() !== "string") { + throw new Error("domain must be a non empty string."); + } + if (!username || typeof username.valueOf() !== "string") { + throw new Error("username must be a non empty string."); + } + if (!password || typeof password.valueOf() !== "string") { + throw new Error("password must be a non empty string."); + } + super(clientId, domain, tokenAudience, environment, tokenCache); + this.username = username; + this.password = password; + } + crossCheckUserNameWithToken(username, userIdFromToken) { + // to maintain the casing consistency between "azureprofile.json" and token cache. (RD 1996587) + // use the "userId" here, which should be the same with "username" except the casing. + return (username.toLowerCase() === userIdFromToken.toLowerCase()); + } + /** + * Tries to get the token from cache initially. If that is unsuccessful then it tries to get the token from ADAL. + * @returns {Promise} + * {object} [tokenResponse] The tokenResponse (tokenType and accessToken are the two important properties). 
+ * @memberof UserTokenCredentials + */ + getToken() { + return __awaiter(this, void 0, void 0, function* () { + try { + return yield this.getTokenFromCache(this.username); + } + catch (error) { + const self = this; + const resource = this.getActiveDirectoryResourceId(); + return new Promise((resolve, reject) => { + self.authContext.acquireTokenWithUsernamePassword(resource, self.username, self.password, self.clientId, (error, tokenResponse) => { + if (error) { + return reject(error); + } + if (tokenResponse.error || tokenResponse.errorDescription) { + return reject(tokenResponse); + } + tokenResponse = tokenResponse; + if (self.crossCheckUserNameWithToken(self.username, tokenResponse.userId)) { + return resolve(tokenResponse); + } + else { + return reject(`The userId "${tokenResponse.userId}" in access token doesn"t match the username "${self.username}" provided during authentication.`); + } + }); + }); + } + }); + } +} +exports.UserTokenCredentials = UserTokenCredentials; +//# sourceMappingURL=userTokenCredentials.js.map + +/***/ }), + +/***/ 29325: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const adal = __nccwpck_require__(35894); +const msRest = __nccwpck_require__(30812); +const child_process_1 = __nccwpck_require__(63129); +const fs_1 = __nccwpck_require__(35747); +const ms_rest_azure_env_1 = __nccwpck_require__(40787); +const applicationTokenCredentials_1 = __nccwpck_require__(35216); +const applicationTokenCertificateCredentials_1 = __nccwpck_require__(2265); +const deviceTokenCredentials_1 = __nccwpck_require__(80450); +const userTokenCredentials_1 = __nccwpck_require__(14502); +const authConstants_1 = __nccwpck_require__(26700); +const subscriptionUtils_1 = __nccwpck_require__(47813); +const msiVmTokenCredentials_1 = __nccwpck_require__(73287); +const msiAppServiceTokenCredentials_1 = __nccwpck_require__(68376); +/** + * @constant {Array} managementPlaneTokenAudiences - Urls for management plane token + * audience across different azure environments. 
+ */ +const managementPlaneTokenAudiences = [ + "https://management.core.windows.net/", + "https://management.core.chinacloudapi.cn/", + "https://management.core.usgovcloudapi.net/", + "https://management.core.cloudapi.de/", + "https://management.azure.com/", + "https://management.core.windows.net", + "https://management.core.chinacloudapi.cn", + "https://management.core.usgovcloudapi.net", + "https://management.core.cloudapi.de", + "https://management.azure.com" +]; +function turnOnLogging() { + const log = adal.Logging; + log.setLoggingOptions({ + level: 3, + log: function (level, message, error) { + level; + console.info(message); + if (error) { + console.error(error); + } + } + }); +} +if (process.env["AZURE_ADAL_LOGGING_ENABLED"]) { + turnOnLogging(); +} +/** + * Provides a UserTokenCredentials object and the list of subscriptions associated with that userId across all the applicable tenants. + * This method is applicable only for organizational ids that are not 2FA enabled otherwise please use interactive login. + * + * @param {string} username The user name for the Organization Id account. + * @param {string} password The password for the Organization Id account. + * @param {object} [options] Object representing optional parameters. + * @param {string} [options.clientId] The active directory application client id. + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. + * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {string} [options.domain] The domain or tenant id containing this application. Default value "common". + * @param {Environment} [options.environment] The azure environment to authenticate with. + * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. + * + * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. + */ +function withUsernamePasswordWithAuthResponse(username, password, options) { + return __awaiter(this, void 0, void 0, function* () { + if (!options) { + options = {}; + } + if (!options.clientId) { + options.clientId = authConstants_1.AuthConstants.DEFAULT_ADAL_CLIENT_ID; + } + if (!options.domain) { + options.domain = authConstants_1.AuthConstants.AAD_COMMON_TENANT; + } + if (!options.environment) { + options.environment = ms_rest_azure_env_1.Environment.AzureCloud; + } + let creds; + let tenantList = []; + let subscriptionList = []; + try { + creds = new userTokenCredentials_1.UserTokenCredentials(options.clientId, options.domain, username, password, options.tokenAudience, options.environment); + yield creds.getToken(); + // The token cache gets propulated for all the tenants as a part of building the tenantList. 
+ tenantList = yield subscriptionUtils_1.buildTenantList(creds); + subscriptionList = yield _getSubscriptions(creds, tenantList, options.tokenAudience); + } + catch (err) { + return Promise.reject(err); + } + return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); + }); +} +exports.withUsernamePasswordWithAuthResponse = withUsernamePasswordWithAuthResponse; +/** + * Provides an ApplicationTokenCredentials object and the list of subscriptions associated with that servicePrinicpalId/clientId across all the applicable tenants. + * + * @param {string} clientId The active directory application client id also known as the SPN (ServicePrincipal Name). + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. + * @param {string} secret The application secret for the service principal. + * @param {string} domain The domain or tenant id containing this application. + * @param {object} [options] Object representing optional parameters. + * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {Environment} [options.environment] The azure environment to authenticate with. + * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. + * + * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. + */ +function withServicePrincipalSecretWithAuthResponse(clientId, secret, domain, options) { + return __awaiter(this, void 0, void 0, function* () { + if (!options) { + options = {}; + } + if (!options.environment) { + options.environment = ms_rest_azure_env_1.Environment.AzureCloud; + } + let creds; + let subscriptionList = []; + try { + creds = new applicationTokenCredentials_1.ApplicationTokenCredentials(clientId, domain, secret, options.tokenAudience, options.environment); + yield creds.getToken(); + subscriptionList = yield _getSubscriptions(creds, [domain], options.tokenAudience); + } + catch (err) { + return Promise.reject(err); + } + return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); + }); +} +exports.withServicePrincipalSecretWithAuthResponse = withServicePrincipalSecretWithAuthResponse; +/** + * Provides an ApplicationTokenCertificateCredentials object and the list of subscriptions associated with that servicePrinicpalId/clientId across all the applicable tenants. + * + * @param {string} clientId The active directory application client id also known as the SPN (ServicePrincipal Name). + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. + * @param {string} certificateStringOrFilePath A PEM encoded certificate and private key OR an absolute filepath to the .pem file containing that information. For example: + * - CertificateString: "-----BEGIN PRIVATE KEY-----\n\n-----END PRIVATE KEY-----\n-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----\n" + * - CertificateFilePath: **Absolute** file path of the .pem file. 
+ * @param {string} domain The domain or tenant id containing this application. + * @param {object} [options] Object representing optional parameters. + * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {Environment} [options.environment] The azure environment to authenticate with. + * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. + * + * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. + */ +function withServicePrincipalCertificateWithAuthResponse(clientId, certificateStringOrFilePath, domain, options) { + return __awaiter(this, void 0, void 0, function* () { + if (!options) { + options = {}; + } + if (!options.environment) { + options.environment = ms_rest_azure_env_1.Environment.AzureCloud; + } + let creds; + let subscriptionList = []; + try { + creds = applicationTokenCertificateCredentials_1.ApplicationTokenCertificateCredentials.create(clientId, certificateStringOrFilePath, domain, options); + yield creds.getToken(); + subscriptionList = yield _getSubscriptions(creds, [domain], options.tokenAudience); + } + catch (err) { + return Promise.reject(err); + } + return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); }); } exports.withServicePrincipalCertificateWithAuthResponse = withServicePrincipalCertificateWithAuthResponse; @@ -9427,7 +10627,7 @@ function lexSort (a, b) { */ -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); var ac = __nccwpck_require__(69775); var authParams = __nccwpck_require__(74854); @@ -9488,7 +10688,7 @@ module.exports = exports; */ -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); var constants = __nccwpck_require__(10282); var UserCodeResponseFields = constants.UserCodeResponseFields; @@ -10316,7 +11516,7 @@ module.exports = exports; var request = __nccwpck_require__(48699); var url = __nccwpck_require__(78835); -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); var AADConstants = __nccwpck_require__(10282).AADConstants; var Logger = __nccwpck_require__(92288).Logger; @@ -10602,7 +11802,7 @@ module.exports.W = Authority; -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); var crypto = __nccwpck_require__(76417); __nccwpck_require__(1693); // Adds a number of convenience methods to the builtin Date object. @@ -11451,8 +12651,8 @@ module.exports = Constants; */ -var _ = __nccwpck_require__(83571); -var uuid = __nccwpck_require__(2155); // want to replace with this in the future: https://gist.github.com/jed/982883 +var _ = __nccwpck_require__(4987); +var uuid = __nccwpck_require__(19744); // want to replace with this in the future: https://gist.github.com/jed/982883 @@ -11696,7 +12896,7 @@ module.exports = exports; -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); /** * Constructs a new in memory token cache. 
@@ -11796,7 +12996,7 @@ module.exports = MemoryCache; var request = __nccwpck_require__(48699); var url = __nccwpck_require__(78835); var DOMParser = __nccwpck_require__(57286).DOMParser; -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); var Logger = __nccwpck_require__(92288).Logger; var util = __nccwpck_require__(5336); @@ -12150,10 +13350,10 @@ module.exports = Mex; */ -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); __nccwpck_require__(1693); // Adds a number of convenience methods to the builtin Date object. var querystring = __nccwpck_require__(71191); -var uuid = __nccwpck_require__(2155); +var uuid = __nccwpck_require__(19744); var request = __nccwpck_require__(48699); var url = __nccwpck_require__(78835); var async = __nccwpck_require__(57888); @@ -12698,7 +13898,7 @@ var util = __nccwpck_require__(5336); __nccwpck_require__(1693); var jws = __nccwpck_require__(4636); -var uuid = __nccwpck_require__(2155); +var uuid = __nccwpck_require__(19744); /** * JavaScript dates are in milliseconds, but JWT dates are in seconds. @@ -13492,7 +14692,7 @@ module.exports = TokenRequest; var querystring = __nccwpck_require__(71191); var request = __nccwpck_require__(48699); -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); var url = __nccwpck_require__(78835); var constants = __nccwpck_require__(10282); @@ -13765,7 +14965,7 @@ module.exports = UserRealm; */ -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); var adalIdConstants = __nccwpck_require__(10282).AdalIdParameters; var os = __nccwpck_require__(12087); var url = __nccwpck_require__(78835); @@ -13961,7 +15161,7 @@ module.exports.convertRegularToUrlSafeBase64EncodedString = convertRegularToUrlS var request = __nccwpck_require__(48699); -var uuid = __nccwpck_require__(2155); +var uuid = __nccwpck_require__(19744); var Logger = __nccwpck_require__(92288).Logger; var util = __nccwpck_require__(5336); @@ -14503,7 +15703,7 @@ module.exports = WSTrustResponse; */ -var _ = __nccwpck_require__(83571); +var _ = __nccwpck_require__(4987); var select = __nccwpck_require__(74277); var XMLSerializer = __nccwpck_require__(57286).XMLSerializer; @@ -14609,6 +15809,221 @@ var exports = { module.exports = exports; +/***/ }), + +/***/ 19744: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var v1 = __nccwpck_require__(64425); +var v4 = __nccwpck_require__(85258); + +var uuid = v4; +uuid.v1 = v1; +uuid.v4 = v4; + +module.exports = uuid; + + +/***/ }), + +/***/ 54121: +/***/ ((module) => { + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} + +module.exports = bytesToUuid; + + +/***/ }), + +/***/ 57455: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Unique ID creation requires a high quality random # generator. 
In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = __nccwpck_require__(76417); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; + + +/***/ }), + +/***/ 64425: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(57455); +var bytesToUuid = __nccwpck_require__(54121); + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +var _nodeId; +var _clockseq; + +// Previous uuid creation time +var _lastMSecs = 0; +var _lastNSecs = 0; + +// See https://github.com/uuidjs/uuid for API details +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; + + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; + + // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + if (node == null || clockseq == null) { + var seedBytes = rng(); + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [ + seedBytes[0] | 0x01, + seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] + ]; + } + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } + + // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); + + // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; + + // Time since last uuid creation (in msecs) + var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; + + // Per 4.2.1.2, Bump clockseq on clock regression + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } + + // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } + + // Per 4.2.1.2 Throw error if too many uuids are requested + if (nsecs >= 10000) { + throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; + + // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + msecs += 12219292800000; + + // `time_low` + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; + + // `time_mid` + var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; + + // `time_high_and_version` + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + b[i++] = tmh >>> 16 & 0xff; + + // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + b[i++] = clockseq >>> 8 | 0x80; + + // `clock_seq_low` + b[i++] = clockseq & 0xff; + + // `node` + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf ? buf : bytesToUuid(b); +} + +module.exports = v1; + + +/***/ }), + +/***/ 85258: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var rng = __nccwpck_require__(57455); +var bytesToUuid = __nccwpck_require__(54121); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; + + /***/ }), /***/ 64941: @@ -20202,18 +21617,6 @@ function validateKeyword(definition, throwError) { } -/***/ }), - -/***/ 65063: -/***/ ((module) => { - -"use strict"; - -module.exports = function () { - return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g; -}; - - /***/ }), /***/ 52068: @@ -20690,12556 +22093,9847 @@ exports.TrackerStream = __nccwpck_require__(31375) /***/ }), -/***/ 92931: +/***/ 10165: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. +var EventEmitter = __nccwpck_require__(28614).EventEmitter +var util = __nccwpck_require__(31669) +var trackerId = 0 +var TrackerBase = module.exports = function (name) { + EventEmitter.call(this) + this.id = ++trackerId + this.name = name +} +util.inherits(TrackerBase, EventEmitter) -/**/ +/***/ }), -var pna = __nccwpck_require__(47810); -/**/ +/***/ 60660: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/**/ -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ +"use strict"; -module.exports = Duplex; +var util = __nccwpck_require__(31669) +var TrackerBase = __nccwpck_require__(10165) +var Tracker = __nccwpck_require__(8074) +var TrackerStream = __nccwpck_require__(31375) -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ +var TrackerGroup = module.exports = function (name) { + TrackerBase.call(this, name) + this.parentGroup = null + this.trackers = [] + this.completion = {} + this.weight = {} + this.totalWeight = 0 + this.finished = false + this.bubbleChange = bubbleChange(this) +} +util.inherits(TrackerGroup, TrackerBase) -var Readable = __nccwpck_require__(70661); -var Writable = __nccwpck_require__(7631); +function bubbleChange (trackerGroup) { + return function (name, completed, tracker) { + trackerGroup.completion[tracker.id] = completed + if (trackerGroup.finished) return + trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) + } +} -util.inherits(Duplex, Readable); +TrackerGroup.prototype.nameInTree = function () { + var names = [] + var from = this + while (from) { + names.unshift(from.name) + from = from.parentGroup + } + return names.join('/') +} -{ - // avoid scope creep, the keys array can then be collected - var keys = objectKeys(Writable.prototype); - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; +TrackerGroup.prototype.addUnit = function (unit, weight) { + if (unit.addUnit) { + var toTest = this + while (toTest) { + if (unit === toTest) { + throw new Error( + 'Attempted to add tracker group ' + + unit.name + ' to tree that already includes it ' + + this.nameInTree(this)) + } + toTest = toTest.parentGroup + } + unit.parentGroup = this } + this.weight[unit.id] = weight || 1 + this.totalWeight += this.weight[unit.id] + this.trackers.push(unit) + this.completion[unit.id] = unit.completed() + unit.on('change', this.bubbleChange) + if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit) + return unit } -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); +TrackerGroup.prototype.completed = function () { 
+ if (this.trackers.length === 0) return 0 + var valPerWeight = 1 / this.totalWeight + var completed = 0 + for (var ii = 0; ii < this.trackers.length; ii++) { + var trackerId = this.trackers[ii].id + completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId] + } + return completed +} - Readable.call(this, options); - Writable.call(this, options); +TrackerGroup.prototype.newGroup = function (name, weight) { + return this.addUnit(new TrackerGroup(name), weight) +} - if (options && options.readable === false) this.readable = false; +TrackerGroup.prototype.newItem = function (name, todo, weight) { + return this.addUnit(new Tracker(name, todo), weight) +} - if (options && options.writable === false) this.writable = false; +TrackerGroup.prototype.newStream = function (name, todo, weight) { + return this.addUnit(new TrackerStream(name, todo), weight) +} - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; +TrackerGroup.prototype.finish = function () { + this.finished = true + if (!this.trackers.length) this.addUnit(new Tracker(), 1, true) + for (var ii = 0; ii < this.trackers.length; ii++) { + var tracker = this.trackers[ii] + tracker.finish() + tracker.removeListener('change', this.bubbleChange) + } + this.emit('change', this.name, 1, this) +} - this.once('end', onend); +var buffer = ' ' +TrackerGroup.prototype.debug = function (depth) { + depth = depth || 0 + var indent = depth ? buffer.substr(0, depth) : '' + var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' + this.trackers.forEach(function (tracker) { + if (tracker instanceof TrackerGroup) { + output += tracker.debug(depth + 1) + } else { + output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' + } + }) + return output } -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; - } -}); -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; +/***/ }), - // no more data can be written. - // But allow more writes to happen in this tick. 
- pna.nextTick(onEndNT, this); +/***/ 31375: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var util = __nccwpck_require__(31669) +var stream = __nccwpck_require__(51642) +var delegate = __nccwpck_require__(61318) +var Tracker = __nccwpck_require__(8074) + +var TrackerStream = module.exports = function (name, size, options) { + stream.Transform.call(this, options) + this.tracker = new Tracker(name, size) + this.name = name + this.id = this.tracker.id + this.tracker.on('change', delegateChange(this)) } +util.inherits(TrackerStream, stream.Transform) -function onEndNT(self) { - self.end(); +function delegateChange (trackerStream) { + return function (name, completion, tracker) { + trackerStream.emit('change', name, completion, trackerStream) + } } -Object.defineProperty(Duplex.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined || this._writableState === undefined) { - return false; - } - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; - } +TrackerStream.prototype._transform = function (data, encoding, cb) { + this.tracker.completeWork(data.length ? data.length : 1) + this.push(data) + cb() +} - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - this._writableState.destroyed = value; - } -}); +TrackerStream.prototype._flush = function (cb) { + this.tracker.finish() + cb() +} -Duplex.prototype._destroy = function (err, cb) { - this.push(null); - this.end(); +delegate(TrackerStream.prototype, 'tracker') + .method('completed') + .method('addWork') + .method('finish') - pna.nextTick(cb, err); -}; /***/ }), -/***/ 61894: +/***/ 8074: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. 
+var util = __nccwpck_require__(31669) +var TrackerBase = __nccwpck_require__(10165) +var Tracker = module.exports = function (name, todo) { + TrackerBase.call(this, name) + this.workDone = 0 + this.workTodo = todo || 0 +} +util.inherits(Tracker, TrackerBase) +Tracker.prototype.completed = function () { + return this.workTodo === 0 ? 0 : this.workDone / this.workTodo +} -module.exports = PassThrough; +Tracker.prototype.addWork = function (work) { + this.workTodo += work + this.emit('change', this.name, this.completed(), this) +} -var Transform = __nccwpck_require__(53937); +Tracker.prototype.completeWork = function (work) { + this.workDone += work + if (this.workDone > this.workTodo) this.workDone = this.workTodo + this.emit('change', this.name, this.completed(), this) +} -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ +Tracker.prototype.finish = function () { + this.workTodo = this.workDone = 1 + this.emit('change', this.name, 1, this) +} -util.inherits(PassThrough, Transform); -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); +/***/ }), - Transform.call(this, options); -} +/***/ 99348: +/***/ ((module) => { -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; +// Copyright 2011 Mark Cavage All rights reserved. -/***/ }), -/***/ 70661: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +module.exports = { -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + newInvalidAsn1Error: function (msg) { + var e = new Error(); + e.name = 'InvalidAsn1Error'; + e.message = msg || ''; + return e; + } +}; -/**/ +/***/ }), -var pna = __nccwpck_require__(47810); -/**/ +/***/ 194: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = Readable; +// Copyright 2011 Mark Cavage All rights reserved. 
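// Illustration: a minimal sketch of how the tracker classes added above fit
// together. They appear to be the bundled "are-we-there-yet" progress
// primitives (the package name and the require line are assumptions; inside
// the bundle they are wired up via __nccwpck_require__ ids instead).
var TrackerGroup = require('are-we-there-yet').TrackerGroup

var top = new TrackerGroup('fetch')
var download = top.newItem('download', 100, 3) // Tracker with todo = 100, weight 3
var write = top.newItem('write', 1, 1)         // Tracker with todo = 1, weight 1

top.on('change', function (name, completed) {
  // `completed` is the group aggregate: sum of (weight_i / totalWeight) * completion_i
  console.log(name, (completed * 100).toFixed(1) + '%')
})

download.completeWork(50) // group is now (3/4) * 0.5       = 0.375
write.finish()            // group is now 0.375 + (1/4) * 1 = 0.625
top.finish()              // forces every unit to 1 and emits a final 'change'
// top.newStream(name, todo, weight) works the same way but returns the
// TrackerStream Transform defined above, so progress can be tracked while piping.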
-/**/ -var isArray = __nccwpck_require__(20893); -/**/ +var errors = __nccwpck_require__(99348); +var types = __nccwpck_require__(42473); -/**/ -var Duplex; -/**/ +var Reader = __nccwpck_require__(20290); +var Writer = __nccwpck_require__(43200); -Readable.ReadableState = ReadableState; -/**/ -var EE = __nccwpck_require__(28614).EventEmitter; +// --- Exports -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ +module.exports = { -/**/ -var Stream = __nccwpck_require__(69777); -/**/ + Reader: Reader, -/**/ + Writer: Writer -var Buffer = __nccwpck_require__(95237).Buffer; -var OurUint8Array = global.Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); +}; + +for (var t in types) { + if (types.hasOwnProperty(t)) + module.exports[t] = types[t]; } -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +for (var e in errors) { + if (errors.hasOwnProperty(e)) + module.exports[e] = errors[e]; } -/**/ -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ +/***/ }), -/**/ -var debugUtil = __nccwpck_require__(31669); -var debug = void 0; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/**/ +/***/ 20290: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var BufferList = __nccwpck_require__(52743); -var destroyImpl = __nccwpck_require__(69147); -var StringDecoder; +// Copyright 2011 Mark Cavage All rights reserved. -util.inherits(Readable, Stream); +var assert = __nccwpck_require__(42357); +var Buffer = __nccwpck_require__(15118).Buffer; -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +var ASN1 = __nccwpck_require__(42473); +var errors = __nccwpck_require__(99348); -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; -} +// --- Globals -function ReadableState(options, stream) { - Duplex = Duplex || __nccwpck_require__(92931); +var newInvalidAsn1Error = errors.newInvalidAsn1Error; - options = options || {}; - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; - // object stream flag. 
Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; +// --- API - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; +function Reader(data) { + if (!data || !Buffer.isBuffer(data)) + throw new TypeError('data must be a node Buffer'); - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var readableHwm = options.readableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this._buf = data; + this._size = data.length; - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + // These hold the "current" state + this._len = 0; + this._offset = 0; +} - // cast to ints. - this.highWaterMark = Math.floor(this.highWaterMark); +Object.defineProperty(Reader.prototype, 'length', { + enumerable: true, + get: function () { return (this._len); } +}); - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; +Object.defineProperty(Reader.prototype, 'offset', { + enumerable: true, + get: function () { return (this._offset); } +}); - // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - this.sync = true; +Object.defineProperty(Reader.prototype, 'remain', { + get: function () { return (this._size - this._offset); } +}); - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; +Object.defineProperty(Reader.prototype, 'buffer', { + get: function () { return (this._buf.slice(this._offset)); } +}); - // has it been destroyed - this.destroyed = false; - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; +/** + * Reads a single byte and advances offset; you can pass in `true` to make this + * a "peek" operation (i.e., get the byte, but don't advance the offset). + * + * @param {Boolean} peek true means don't move offset. + * @return {Number} the next byte, null if not enough data. 
+ */ +Reader.prototype.readByte = function (peek) { + if (this._size - this._offset < 1) + return null; - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; + var b = this._buf[this._offset] & 0xff; - // if true, a maybeReadMore has been scheduled - this.readingMore = false; + if (!peek) + this._offset += 1; - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = __nccwpck_require__(63824)/* .StringDecoder */ .s; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} + return b; +}; -function Readable(options) { - Duplex = Duplex || __nccwpck_require__(92931); - if (!(this instanceof Readable)) return new Readable(options); +Reader.prototype.peek = function () { + return this.readByte(true); +}; - this._readableState = new ReadableState(options, this); - // legacy - this.readable = true; +/** + * Reads a (potentially) variable length off the BER buffer. This call is + * not really meant to be called directly, as callers have to manipulate + * the internal buffer afterwards. + * + * As a result of this call, you can call `Reader.length`, until the + * next thing called that does a readLength. + * + * @return {Number} the amount of offset to advance the buffer. + * @throws {InvalidAsn1Error} on bad ASN.1 + */ +Reader.prototype.readLength = function (offset) { + if (offset === undefined) + offset = this._offset; - if (options) { - if (typeof options.read === 'function') this._read = options.read; + if (offset >= this._size) + return null; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } + var lenB = this._buf[offset++] & 0xff; + if (lenB === null) + return null; - Stream.call(this); -} + if ((lenB & 0x80) === 0x80) { + lenB &= 0x7f; -Object.defineProperty(Readable.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined) { - return false; - } - return this._readableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } + if (lenB === 0) + throw newInvalidAsn1Error('Indefinite length not supported'); - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - } -}); + if (lenB > 4) + throw newInvalidAsn1Error('encoding too long'); -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; -Readable.prototype._destroy = function (err, cb) { - this.push(null); - cb(err); -}; + if (this._size - offset < lenB) + return null; -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. 
-Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; + this._len = 0; + for (var i = 0; i < lenB; i++) + this._len = (this._len << 8) + (this._buf[offset++] & 0xff); - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } - skipChunkCheck = true; - } } else { - skipChunkCheck = true; + // Wasn't a variable length + this._len = lenB; } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; - -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); + return offset; }; -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - if (addToFront) { - if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); - } else if (state.ended) { - stream.emit('error', new Error('stream.push() after EOF')); - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - } - } +/** + * Parses the next sequence in this BER buffer. + * + * To get the length of the sequence, call `Reader.length`. + * + * @return {Number} the sequence's tag. + */ +Reader.prototype.readSequence = function (tag) { + var seq = this.peek(); + if (seq === null) + return null; + if (tag !== undefined && tag !== seq) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + seq.toString(16)); - return needMoreData(state); -} + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + this._offset = o; + return seq; +}; - if (state.needReadable) emitReadable(stream); - } - maybeReadMore(stream, state); -} -function chunkInvalid(state, chunk) { - var er; - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} +Reader.prototype.readInt = function () { + return this._readTag(ASN1.Integer); +}; -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. 
Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; +Reader.prototype.readBoolean = function () { + return (this._readTag(ASN1.Boolean) === 0 ? false : true); }; -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = __nccwpck_require__(63824)/* .StringDecoder */ .s; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; + +Reader.prototype.readEnumeration = function () { + return this._readTag(ASN1.Enumeration); }; -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough - if (!state.ended) { - state.needReadable = true; - return 0; - } - return state.length; -} +Reader.prototype.readString = function (tag, retbuf) { + if (!tag) + tag = ASN1.OctetString; -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; + var b = this.peek(); + if (b === null) + return null; - if (n !== 0) state.emittedReadable = false; + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } + var o = this.readLength(this._offset + 1); // stored in `length` - n = howMuchToRead(n, state); + if (o === null) + return null; - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); + if (this.length > this._size - o) return null; - } - // All the actual chunk generation logic needs to be - // *below* the call to _read. 
The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. + this._offset = o; - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); + if (this.length === 0) + return retbuf ? Buffer.alloc(0) : ''; - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } + var str = this._buf.slice(this._offset, this._offset + this.length); + this._offset += this.length; - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state); - } + return retbuf ? str : str.toString('utf8'); +}; - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; +Reader.prototype.readOID = function (tag) { + if (!tag) + tag = ASN1.OID; - if (ret === null) { - state.needReadable = true; - n = 0; - } else { - state.length -= n; - } + var b = this.readString(tag, true); + if (b === null) + return null; - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; + var values = []; + var value = 0; - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); + for (var i = 0; i < b.length; i++) { + var byte = b[i] & 0xff; + + value <<= 7; + value += byte & 0x7f; + if ((byte & 0x80) === 0) { + values.push(value); + value = 0; + } } - if (ret !== null) this.emit('data', ret); + value = values.shift(); + values.unshift(value % 40); + values.unshift((value / 40) >> 0); - return ret; + return values.join('.'); }; -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 
1 : chunk.length; - } - } - state.ended = true; - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); -} +Reader.prototype._readTag = function (tag) { + assert.ok(tag !== undefined); -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); - } -} + var b = this.peek(); -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} + if (b === null) + return null; -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - pna.nextTick(maybeReadMore_, stream, state); - } -} + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; + + if (this.length > 4) + throw newInvalidAsn1Error('Integer too long: ' + this.length); + + if (this.length > this._size - o) + return null; + this._offset = o; + + var fb = this._buf[this._offset]; + var value = 0; + + for (var i = 0; i < this.length; i++) { + value <<= 8; + value |= (this._buf[this._offset++] & 0xff); } - state.readingMore = false; -} -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('_read() is not implemented')); + if ((fb & 0x80) === 0x80 && i !== 4) + value -= (1 << (i * 8)); + + return value >> 0; }; -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; +// --- Exported API - var endFn = doEnd ? 
onend : unpipe; - if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); +module.exports = Reader; - dest.on('unpipe', onunpipe); - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } - function onend() { - debug('onend'); - dest.end(); - } +/***/ }), - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); +/***/ 42473: +/***/ ((module) => { - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); +// Copyright 2011 Mark Cavage All rights reserved. - cleanedUp = true; - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } +module.exports = { + EOC: 0, + Boolean: 1, + Integer: 2, + BitString: 3, + OctetString: 4, + Null: 5, + OID: 6, + ObjectDescriptor: 7, + External: 8, + Real: 9, // float + Enumeration: 10, + PDV: 11, + Utf8String: 12, + RelativeOID: 13, + Sequence: 16, + Set: 17, + NumericString: 18, + PrintableString: 19, + T61String: 20, + VideotexString: 21, + IA5String: 22, + UTCTime: 23, + GeneralizedTime: 24, + GraphicString: 25, + VisibleString: 26, + GeneralString: 28, + UniversalString: 29, + CharacterString: 30, + BMPString: 31, + Constructor: 32, + Context: 128 +}; - // If the user pushes more data while we're writing to dest then we'll end up - // in ondata again. However, we only want to increase awaitDrain once because - // dest will only emit one 'drain' event for the multiple writes. - // => Introduce a guard on increasing awaitDrain. - var increasedAwaitDrain = false; - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - increasedAwaitDrain = false; - var ret = dest.write(chunk); - if (false === ret && !increasedAwaitDrain) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; - increasedAwaitDrain = true; - } - src.pause(); - } - } - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } +/***/ }), - // Make sure our error handler is attached before userland ones. 
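// Illustration: a minimal sketch of decoding a hand-built BER buffer with the
// Reader defined above (it appears to be the "asn1" package's BerReader; that
// name is an assumption). The bytes encode SEQUENCE { INTEGER 3, OCTET STRING 'hi' }:
//   0x30 0x07 | 0x02 0x01 0x03 | 0x04 0x02 0x68 0x69
//   seq  len    int  len  3      octstr len 'h'  'i'
var buf = Buffer.from([0x30, 0x07, 0x02, 0x01, 0x03, 0x04, 0x02, 0x68, 0x69])
var reader = new Reader(buf)

reader.readSequence() // returns the tag that was read, 0x30
reader.readInt()      // 3
reader.readString()   // 'hi' (the tag defaults to OctetString)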
- prependListener(dest, 'error', onerror); +/***/ 43200: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); +// Copyright 2011 Mark Cavage All rights reserved. - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } +var assert = __nccwpck_require__(42357); +var Buffer = __nccwpck_require__(15118).Buffer; +var ASN1 = __nccwpck_require__(42473); +var errors = __nccwpck_require__(99348); - // tell the dest that it's being piped to - dest.emit('pipe', src); - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } +// --- Globals - return dest; +var newInvalidAsn1Error = errors.newInvalidAsn1Error; + +var DEFAULT_OPTS = { + size: 1024, + growthFactor: 8 }; -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; + +// --- Helpers + +function merge(from, to) { + assert.ok(from); + assert.equal(typeof (from), 'object'); + assert.ok(to); + assert.equal(typeof (to), 'object'); + + var keys = Object.getOwnPropertyNames(from); + keys.forEach(function (key) { + if (to[key]) + return; + + var value = Object.getOwnPropertyDescriptor(from, key); + Object.defineProperty(to, key, value); + }); + + return to; } -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { hasUnpiped: false }; - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; +// --- API - if (!dest) dest = state.pipes; +function Writer(options) { + options = merge(DEFAULT_OPTS, options || {}); - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; - } + this._buf = Buffer.alloc(options.size || 1024); + this._size = this._buf.length; + this._offset = 0; + this._options = options; - // slow case. multiple pipe destinations. + // A list of offsets in the buffer where we need to insert + // sequence tag/len pairs. + this._seq = []; +} - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; +Object.defineProperty(Writer.prototype, 'buffer', { + get: function () { + if (this._seq.length) + throw newInvalidAsn1Error(this._seq.length + ' unended sequence(s)'); - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, unpipeInfo); - }return this; + return (this._buf.slice(0, this._offset)); } +}); - // try to find the right one. 
- var index = indexOf(state.pipes, dest); - if (index === -1) return this; +Writer.prototype.writeByte = function (b) { + if (typeof (b) !== 'number') + throw new TypeError('argument must be a Number'); - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; + this._ensure(1); + this._buf[this._offset++] = b; +}; - dest.emit('unpipe', this, unpipeInfo); - return this; -}; +Writer.prototype.writeInt = function (i, tag) { + if (typeof (i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof (tag) !== 'number') + tag = ASN1.Integer; -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); + var sz = 4; - if (ev === 'data') { - // Start flowing on next tick if stream isn't explicitly paused - if (this._readableState.flowing !== false) this.resume(); - } else if (ev === 'readable') { - var state = this._readableState; - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.emittedReadable = false; - if (!state.reading) { - pna.nextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this); - } - } + while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) && + (sz > 1)) { + sz--; + i <<= 8; } - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; + if (sz > 4) + throw newInvalidAsn1Error('BER ints cannot be > 0xffffffff'); -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} + this._ensure(2 + sz); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = sz; -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); + while (sz-- > 0) { + this._buf[this._offset++] = ((i & 0xff000000) >>> 24); + i <<= 8; } - return this; + }; -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - pna.nextTick(resume_, stream, state); - } -} -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } +Writer.prototype.writeNull = function () { + this.writeByte(ASN1.Null); + this.writeByte(0x00); +}; - state.resumeScheduled = false; - state.awaitDrain = 0; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; +Writer.prototype.writeEnumeration = function (i, tag) { + if (typeof (i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof (tag) !== 'number') + tag = ASN1.Enumeration; + + return this.writeInt(i, tag); }; -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null) {} -} -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. 
-Readable.prototype.wrap = function (stream) { - var _this = this; +Writer.prototype.writeBoolean = function (b, tag) { + if (typeof (b) !== 'boolean') + throw new TypeError('argument must be a Boolean'); + if (typeof (tag) !== 'number') + tag = ASN1.Boolean; - var state = this._readableState; - var paused = false; + this._ensure(3); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = 0x01; + this._buf[this._offset++] = b ? 0xff : 0x00; +}; - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } - _this.push(null); - }); +Writer.prototype.writeString = function (s, tag) { + if (typeof (s) !== 'string') + throw new TypeError('argument must be a string (was: ' + typeof (s) + ')'); + if (typeof (tag) !== 'number') + tag = ASN1.OctetString; - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); + var len = Buffer.byteLength(s); + this.writeByte(tag); + this.writeLength(len); + if (len) { + this._ensure(len); + this._buf.write(s, this._offset); + this._offset += len; + } +}; - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - var ret = _this.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } +Writer.prototype.writeBuffer = function (buf, tag) { + if (typeof (tag) !== 'number') + throw new TypeError('tag must be a number'); + if (!Buffer.isBuffer(buf)) + throw new TypeError('argument must be a buffer'); + + this.writeByte(tag); + this.writeLength(buf.length); + this._ensure(buf.length); + buf.copy(this._buf, this._offset, 0, buf.length); + this._offset += buf.length; +}; + + +Writer.prototype.writeStringArray = function (strings) { + if ((!strings instanceof Array)) + throw new TypeError('argument must be an Array[String]'); + + var self = this; + strings.forEach(function (s) { + self.writeString(s); }); +}; - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } +// This is really to solve DER cases, but whatever for now +Writer.prototype.writeOID = function (s, tag) { + if (typeof (s) !== 'string') + throw new TypeError('argument must be a string'); + if (typeof (tag) !== 'number') + tag = ASN1.OID; - // proxy certain important events. - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } + if (!/^([0-9]+\.){3,}[0-9]+$/.test(s)) + throw new Error('argument is not a valid OID string'); - // when we try to consume some more bytes, simply unpause the - // underlying stream. 
- this._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); + function encodeOctet(bytes, octet) { + if (octet < 128) { + bytes.push(octet); + } else if (octet < 16384) { + bytes.push((octet >>> 7) | 0x80); + bytes.push(octet & 0x7F); + } else if (octet < 2097152) { + bytes.push((octet >>> 14) | 0x80); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else if (octet < 268435456) { + bytes.push((octet >>> 21) | 0x80); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else { + bytes.push(((octet >>> 28) | 0x80) & 0xFF); + bytes.push(((octet >>> 21) | 0x80) & 0xFF); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); } - }; + } - return this; + var tmp = s.split('.'); + var bytes = []; + bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10)); + tmp.slice(2).forEach(function (b) { + encodeOctet(bytes, parseInt(b, 10)); + }); + + var self = this; + this._ensure(2 + bytes.length); + this.writeByte(tag); + this.writeLength(bytes.length); + bytes.forEach(function (b) { + self.writeByte(b); + }); }; -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._readableState.highWaterMark; - } -}); -// exposed for testing purposes only. -Readable._fromList = fromList; +Writer.prototype.writeLength = function (len) { + if (typeof (len) !== 'number') + throw new TypeError('argument must be a Number'); -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; + this._ensure(4); - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); - state.buffer.clear(); + if (len <= 0x7f) { + this._buf[this._offset++] = len; + } else if (len <= 0xff) { + this._buf[this._offset++] = 0x81; + this._buf[this._offset++] = len; + } else if (len <= 0xffff) { + this._buf[this._offset++] = 0x82; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else if (len <= 0xffffff) { + this._buf[this._offset++] = 0x83; + this._buf[this._offset++] = len >> 16; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; } else { - // read part of list - ret = fromListPartial(n, state.buffer, state.decoder); + throw newInvalidAsn1Error('Length too long (> 4 bytes)'); } +}; - return ret; -} +Writer.prototype.startSequence = function (tag) { + if (typeof (tag) !== 'number') + tag = ASN1.Sequence | ASN1.Constructor; -// Extracts only enough buffered data to satisfy the amount requested. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. 
-function fromListPartial(n, list, hasStrings) { - var ret; - if (n < list.head.data.length) { - // slice is the same for buffers and strings - ret = list.head.data.slice(0, n); - list.head.data = list.head.data.slice(n); - } else if (n === list.head.data.length) { - // first chunk is a perfect match - ret = list.shift(); + this.writeByte(tag); + this._seq.push(this._offset); + this._ensure(3); + this._offset += 3; +}; + + +Writer.prototype.endSequence = function () { + var seq = this._seq.pop(); + var start = seq + 3; + var len = this._offset - start; + + if (len <= 0x7f) { + this._shift(start, len, -2); + this._buf[seq] = len; + } else if (len <= 0xff) { + this._shift(start, len, -1); + this._buf[seq] = 0x81; + this._buf[seq + 1] = len; + } else if (len <= 0xffff) { + this._buf[seq] = 0x82; + this._buf[seq + 1] = len >> 8; + this._buf[seq + 2] = len; + } else if (len <= 0xffffff) { + this._shift(start, len, 1); + this._buf[seq] = 0x83; + this._buf[seq + 1] = len >> 16; + this._buf[seq + 2] = len >> 8; + this._buf[seq + 3] = len; } else { - // result spans more than one buffer - ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + throw newInvalidAsn1Error('Sequence too long'); } - return ret; -} +}; -// Copies a specified amount of characters from the list of buffered data -// chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBufferString(n, list) { - var p = list.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = str.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} -// Copies a specified amount of bytes from the list of buffered data chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBuffer(n, list) { - var ret = Buffer.allocUnsafe(n); - var p = list.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = buf.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} +Writer.prototype._shift = function (start, len, shift) { + assert.ok(start !== undefined); + assert.ok(len !== undefined); + assert.ok(shift); -function endReadable(stream) { - var state = stream._readableState; + this._buf.copy(this._buf, start + shift, start, start + len); + this._offset += shift; +}; - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. 
- if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); +Writer.prototype._ensure = function (len) { + assert.ok(len); - if (!state.endEmitted) { - state.ended = true; - pna.nextTick(endReadableNT, state, stream); - } -} + if (this._size - this._offset < len) { + var sz = this._size * this._options.growthFactor; + if (sz - this._offset < len) + sz += len; -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } -} + var buf = Buffer.alloc(sz); -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; + this._buf.copy(buf, 0, 0, this._offset); + this._buf = buf; + this._size = sz; } - return -1; -} +}; + + + +// --- Exported API + +module.exports = Writer; + /***/ }), -/***/ 53937: +/***/ 80970: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. 
If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. +// Copyright 2011 Mark Cavage All rights reserved. +// If you have no idea what ASN.1 or BER is, see this: +// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc +var Ber = __nccwpck_require__(194); -module.exports = Transform; -var Duplex = __nccwpck_require__(92931); -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ +// --- Exported API -util.inherits(Transform, Duplex); +module.exports = { -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; + Ber: Ber, - var cb = ts.writecb; + BerReader: Ber.Reader, - if (!cb) { - return this.emit('error', new Error('write callback called multiple times')); - } + BerWriter: Ber.Writer - ts.writechunk = null; - ts.writecb = null; +}; - if (data != null) // single equals check for both `null` and `undefined` - this.push(data); - cb(er); +/***/ }), - var rs = this._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); - } -} +/***/ 66631: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); +// Copyright (c) 2012, Mark Cavage. All rights reserved. +// Copyright 2015 Joyent, Inc. - Duplex.call(this, options); +var assert = __nccwpck_require__(42357); +var Stream = __nccwpck_require__(92413).Stream; +var util = __nccwpck_require__(31669); - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; +///--- Globals - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false; +/* JSSTYLED */ +var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - if (typeof options.flush === 'function') this._flush = options.flush; - } +///--- Internal - // When the writable side finishes, then flush out anything remaining. - this.on('prefinish', prefinish); +function _capitalize(str) { + return (str.charAt(0).toUpperCase() + str.slice(1)); } -function prefinish() { - var _this = this; - - if (typeof this._flush === 'function') { - this._flush(function (er, data) { - done(_this, er, data); +function _toss(name, expected, oper, arg, actual) { + throw new assert.AssertionError({ + message: util.format('%s (%s) is required', name, expected), + actual: (actual === undefined) ? typeof (arg) : actual(arg), + expected: expected, + operator: oper || '===', + stackStartFunction: _toss.caller }); - } else { - done(this, null, null); - } } -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; +function _getClass(arg) { + return (Object.prototype.toString.call(arg).slice(8, -1)); +} -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('_transform() is not implemented'); -}; +function noop() { + // Why even bother with asserts? +} -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. 
- ts.needTransform = true; - } -}; - -Transform.prototype._destroy = function (err, cb) { - var _this2 = this; +///--- Exports - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - _this2.emit('close'); - }); +var types = { + bool: { + check: function (arg) { return typeof (arg) === 'boolean'; } + }, + func: { + check: function (arg) { return typeof (arg) === 'function'; } + }, + string: { + check: function (arg) { return typeof (arg) === 'string'; } + }, + object: { + check: function (arg) { + return typeof (arg) === 'object' && arg !== null; + } + }, + number: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg); + } + }, + finite: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); + } + }, + buffer: { + check: function (arg) { return Buffer.isBuffer(arg); }, + operator: 'Buffer.isBuffer' + }, + array: { + check: function (arg) { return Array.isArray(arg); }, + operator: 'Array.isArray' + }, + stream: { + check: function (arg) { return arg instanceof Stream; }, + operator: 'instanceof', + actual: _getClass + }, + date: { + check: function (arg) { return arg instanceof Date; }, + operator: 'instanceof', + actual: _getClass + }, + regexp: { + check: function (arg) { return arg instanceof RegExp; }, + operator: 'instanceof', + actual: _getClass + }, + uuid: { + check: function (arg) { + return typeof (arg) === 'string' && UUID_REGEXP.test(arg); + }, + operator: 'isUUID' + } }; -function done(stream, er, data) { - if (er) return stream.emit('error', er); - - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); - - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); - - if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); - - return stream.push(null); -} - -/***/ }), - -/***/ 7631: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function _setExports(ndebug) { + var keys = Object.keys(types); + var out; -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ /* re-export standard assert */ + if (process.env.NODE_NDEBUG) { + out = noop; + } else { + out = function (arg, msg) { + if (!arg) { + _toss(msg, 'true', arg); + } + }; + } -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. + /* standard checks */ + keys.forEach(function (k) { + if (ndebug) { + out[k] = noop; + return; + } + var type = types[k]; + out[k] = function (arg, msg) { + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + /* optional checks */ + keys.forEach(function (k) { + var name = 'optional' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + /* arrayOf checks */ + keys.forEach(function (k) { + var name = 'arrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); -/**/ + /* optionalArrayOf checks */ + keys.forEach(function (k) { + var name = 'optionalArrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); -var pna = __nccwpck_require__(47810); -/**/ + /* re-export built-in assertions */ + Object.keys(assert).forEach(function (k) { + if (k === 'AssertionError') { + out[k] = assert[k]; + return; + } + if (ndebug) { + out[k] = noop; + return; + } + out[k] = assert[k]; + }); -module.exports = Writable; + /* export ourselves (for unit tests _only_) */ + out._setExports = _setExports; -/* */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; + return out; } -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; +module.exports = _setExports(process.env.NODE_NDEBUG); - this.next = null; - this.entry = null; - this.finish = function () { - onCorkedFinish(_this, state); - }; -} -/* */ -/**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; -/**/ +/***/ }), -/**/ -var Duplex; -/**/ +/***/ 57888: +/***/ (function(__unused_webpack_module, exports) { -Writable.WritableState = WritableState; +(function (global, factory) { + true ? factory(exports) : + 0; +}(this, (function (exports) { 'use strict'; -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ + /** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. 
Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. + * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ + function apply(fn, ...args) { + return (...callArgs) => fn(...args,...callArgs); + } -/**/ -var internalUtil = { - deprecate: __nccwpck_require__(65278) -}; -/**/ + function initialParams (fn) { + return function (...args/*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; + } -/**/ -var Stream = __nccwpck_require__(69777); -/**/ + /* istanbul ignore file */ -/**/ + var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; + var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; -var Buffer = __nccwpck_require__(95237).Buffer; -var OurUint8Array = global.Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} + function fallback(fn) { + setTimeout(fn, 0); + } -/**/ + function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); + } -var destroyImpl = __nccwpck_require__(69147); + var _defer; -util.inherits(Writable, Stream); + if (hasSetImmediate) { + _defer = setImmediate; + } else if (hasNextTick) { + _defer = process.nextTick; + } else { + _defer = fallback; + } -function nop() {} + var setImmediate$1 = wrap(_defer); -function WritableState(options, stream) { - Duplex = Duplex || __nccwpck_require__(92931); + /** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. 
+ * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. + * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ + function asyncify(func) { + if (isAsync(func)) { + return function (...args/*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback) + } + } - options = options || {}; + return initialParams(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback) + } else { + callback(null, result); + } + }); + } - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; + function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? err : new Error(err)); + }); + } - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; + function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + setImmediate$1(e => { throw e }, err); + } + } - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; + } - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var writableHwm = options.writableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; + function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; + } - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; + } - // cast to ints. - this.highWaterMark = Math.floor(this.highWaterMark); + function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function') + return isAsync(asyncFn) ? 
asyncify(asyncFn) : asyncFn; + } - // if _final has been called - this.finalCalled = false; + // conditionally promisify a function. + // only return a promise if a callback is omitted + function awaitify (asyncFn, arity = asyncFn.length) { + if (!arity) throw new Error('arity is undefined') + function awaitable (...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args) + } - // drain event flag. - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err) + resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }) + } - // has it been destroyed - this.destroyed = false; + return awaitable + } - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; + function applyEach (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = awaitify(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + wrapAsync(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; + } - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; + function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = wrapAsync(iteratee); - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); + } - // a flag to see when we're in the middle of a write. - this.writing = false; + function isArrayLike(value) { + return value && + typeof value.length === 'number' && + value.length >= 0 && + value.length % 1 === 0; + } - // when true all writes will be buffered until .uncork() call - this.corked = 0; + // A temporary value used to identify if the loop should be broken. + // See #1064, #1293 + const breakLoop = {}; - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; + function once(fn) { + function wrapper (...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper + } - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. 
- this.bufferProcessing = false; + function getIterator (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); + } - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; + function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? {value: coll[i], key: i} : null; + } + } - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; + function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) + return null; + i++; + return {value: item.value, key: i}; + } + } - // the amount that is being written when _write is called. - this.writelen = 0; + function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + return i < len ? {value: obj[key], key} : null; + }; + } - this.bufferedRequest = null; - this.lastBufferedRequest = null; + function createIterator(coll) { + if (isArrayLike(coll)) { + return createArrayIterator(coll); + } - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; + var iterator = getIterator(coll); + return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); + } - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; + function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; + } - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; + // for async generators + function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; - // count buffered requests - this.bufferedRequestCount = 0; + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({value, done: iterDone}) => { + //console.log('got value', value) + if (canceled || done) return + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } - // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - this.corkedRequestsFree = new CorkedRequest(this); -} + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return + if (err) return handleError(err) -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; + if (err === false) { + done = true; + canceled = true; + return + } -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. 
Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); + if (result === breakLoop || (done && running <= 0)) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } -// Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. -var realHasInstance; -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function (object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; + function handleError(err) { + if (canceled) return + awaiting = false; + done = true; + callback(err); + } - return object && object._writableState instanceof WritableState; + replenish(); } - }); -} else { - realHasInstance = function (object) { - return object instanceof this; - }; -} -function Writable(options) { - Duplex = Duplex || __nccwpck_require__(92931); + var eachOfLimit = (limit) => { + return (obj, iteratee, callback) => { + callback = once(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1') + } + if (!obj) { + return callback(null); + } + if (isAsyncGenerator(obj)) { + return asyncEachOfLimit(obj, limit, iteratee, callback) + } + if (isAsyncIterable(obj)) { + return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) + } + var nextElem = createIterator(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. + function iterateeCallback(err, value) { + if (canceled) return + running -= 1; + if (err) { + done = true; + callback(err); + } + else if (err === false) { + done = true; + canceled = true; + } + else if (value === breakLoop || (done && running <= 0)) { + done = true; + return callback(null); + } + else if (!looping) { + replenish(); + } + } - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { - return new Writable(options); - } + function replenish () { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); + } + looping = false; + } - this._writableState = new WritableState(options, this); + replenish(); + }; + }; - // legacy. - this.writable = true; + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. 
The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); + } - if (options) { - if (typeof options.write === 'function') this._write = options.write; + var eachOfLimit$2 = awaitify(eachOfLimit$1, 4); - if (typeof options.writev === 'function') this._writev = options.writev; + // eachOf implementation optimized for array-likes + function eachOfArrayLike(coll, iteratee, callback) { + callback = once(callback); + var index = 0, + completed = 0, + {length} = coll, + canceled = false; + if (length === 0) { + callback(null); + } - if (typeof options.destroy === 'function') this._destroy = options.destroy; + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return + if (err) { + callback(err); + } else if ((++completed === length) || value === breakLoop) { + callback(null); + } + } - if (typeof options.final === 'function') this._final = options.final; - } + for (; index < length; index++) { + iteratee(coll[index], index, onlyOnce(iteratorCallback)); + } + } - Stream.call(this); -} + // a generic version of eachOf which can handle array, object, and iterator cases. + function eachOfGeneric (coll, iteratee, callback) { + return eachOfLimit$2(coll, Infinity, iteratee, callback); + } -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe, not readable')); -}; + /** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; + * var configs = {}; + * + * async.forEachOf(obj, function (value, key, callback) { + * fs.readFile(__dirname + value, "utf8", function (err, data) { + * if (err) return callback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * }, function (err) { + * if (err) console.error(err.message); + * // configs is now a map of JSON data + * doSomethingWith(configs); + * }); + */ + function eachOf(coll, iteratee, callback) { + var eachOfImplementation = isArrayLike(coll) ? 
eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, wrapAsync(iteratee), callback); + } -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - pna.nextTick(cb, er); -} + var eachOf$1 = awaitify(eachOf, 3); -// Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. -function validChunk(stream, state, chunk, cb) { - var valid = true; - var er = false; + /** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callback + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * async.map(['file1','file2','file3'], fs.stat, function(err, results) { + * // results is now an array of stats for each file + * }); + */ + function map (coll, iteratee, callback) { + return _asyncMap(eachOf$1, coll, iteratee, callback) + } + var map$1 = awaitify(map, 3); - if (chunk === null) { - er = new TypeError('May not write null values to stream'); - } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - if (er) { - stream.emit('error', er); - pna.nextTick(cb, er); - valid = false; - } - return valid; -} + /** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. 
+ * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ + var applyEach$1 = applyEach(map$1); -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - var isBuf = !state.objectMode && _isUint8Array(chunk); + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfSeries(coll, iteratee, callback) { + return eachOfLimit$2(coll, 1, iteratee, callback) + } + var eachOfSeries$1 = awaitify(eachOfSeries, 3); - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); - } + /** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function mapSeries (coll, iteratee, callback) { + return _asyncMap(eachOfSeries$1, coll, iteratee, callback) + } + var mapSeries$1 = awaitify(mapSeries, 3); - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } + /** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. 
+ * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. It takes no args, other than + * a callback. + */ + var applyEachSeries = applyEach(mapSeries$1); - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + const PROMISE_SYMBOL = Symbol('promiseCallback'); - if (typeof cb !== 'function') cb = nop; + function promiseCallback () { + let resolve, reject; + function callback (err, ...args) { + if (err) return reject(err) + resolve(args.length > 1 ? args : args[0]); + } - if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); - } + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, + reject = rej; + }); - return ret; -}; + return callback + } -Writable.prototype.cork = function () { - var state = this._writableState; + /** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. 
Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * @example + * + * async.auto({ + * // this function will just be passed a callback + * readData: async.apply(fs.readFile, 'data.txt', 'utf-8'), + * showData: ['readData', function(results, cb) { + * // results.readData is the file's contents + * // ... + * }] + * }, callback); + * + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * console.log('in write_file', JSON.stringify(results)); + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * console.log('in email_link', JSON.stringify(results)); + * // once the file is written let's email a link to it... + * // results.write_file contains the filename returned by write_file. + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * console.log('err = ', err); + * console.log('results = ', results); + * }); + */ + function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. + callback = concurrency; + concurrency = null; + } + callback = once(callback || promiseCallback()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } - state.corked++; -}; + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; -Writable.prototype.uncork = function () { - var state = this._writableState; + var listeners = Object.create(null); - if (state.corked) { - state.corked--; + var readyTasks = []; - if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. 
- if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); - } - return chunk; -} + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; - } -}); + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + + '` has a non-existent dependency `' + + dependencyName + '` in ' + + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } - } - var len = state.objectMode ? 1 : chunk.length; + checkForDeadlocks(); + processQueue(); - state.length += len; + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true; + function processQueue() { + if (canceled) return + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while(readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } + } - return ret; -} + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} + taskListeners.push(fn); + } -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - pna.nextTick(cb, er); - // this can emit finish, and it will always happen - // after error - pna.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - // this can emit finish, but finish must - // always follow error - finishMaybe(stream, state); - } -} -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} + function runTask(key, task) { + if (hasError) return; -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; + var taskCallback = onlyOnce((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); - onwriteStateUpdate(state); + runningTasks++; + var taskFn = wrapAsync(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + 
getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); + if (counter !== numTasks) { + throw new Error( + 'async.auto cannot execute tasks due to a recursive dependency' + ); + } + } + + function getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[PROMISE_SYMBOL] } - if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ - } else { - afterWrite(stream, state, finished, cb); + var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; + var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; + var FN_ARG_SPLIT = /,/; + var FN_ARG = /(=.+)?(\s*)$/; + var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg; + + function parseParams(func) { + const src = func.toString().replace(STRIP_COMMENTS, ''); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) + let [, args] = match; + return args + .replace(/\s/g, '') + .split(FN_ARG_SPLIT) + .map((arg) => arg.replace(FN_ARG, '').trim()); } - } -} -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} + /** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ + function autoInject(tasks, callback) { + var newTasks = {}; -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = isAsync(taskFn); + var hasNoDeps = + (!fnIsAsync && taskFn.length === 1) || + (fnIsAsync && taskFn.length === 0); -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; + newTasks[key] = params.concat(params.length > 0 ? 
newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } - var count = 0; - var allBuffers = true; - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - buffer.allBuffers = allBuffers; + // remove callback param + if (!fnIsAsync) params.pop(); - doWrite(stream, state, true, state.length, buffer, '', holder.finish); + newTasks[key] = params.concat(newTask); + } - // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + wrapAsync(taskFn)(...newArgs); + } + }); - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - if (state.writing) { - break; - } + return auto(newTasks, callback); } - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequest = entry; - state.bufferProcessing = false; -} + // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation + // used for queues. This implementation assumes that the node provided by the user can be modified + // to adjust the next and last properties. We implement only the minimal functionality + // for queue support. 
+ class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('_write() is not implemented')); -}; + removeLink(node) { + if (node.prev) node.prev.next = node.next; + else this.head = node.next; + if (node.next) node.next.prev = node.prev; + else this.tail = node.prev; -Writable.prototype._writev = null; + node.prev = node.next = null; + this.length -= 1; + return node; + } -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; + empty () { + while(this.head) this.shift(); + return this; + } - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode; + else this.tail = newNode; + node.next = newNode; + this.length += 1; + } - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode; + else this.head = newNode; + node.prev = newNode; + this.length += 1; + } - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); - } + unshift(node) { + if (this.head) this.insertBefore(this.head, node); + else setInitial(this, node); + } - // ignore unnecessary end() calls. - if (!state.ending && !state.finished) endWritable(this, state, cb); -}; + push(node) { + if (this.tail) this.insertAfter(this.tail, node); + else setInitial(this, node); + } -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} -function callFinal(stream, state) { - stream._final(function (err) { - state.pendingcb--; - if (err) { - stream.emit('error', err); - } - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); -} -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function') { - state.pendingcb++; - state.finalCalled = true; - pna.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); - } - } -} + shift() { + return this.head && this.removeLink(this.head); + } -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - prefinish(stream, state); - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - } - } - return need; -} + pop() { + return this.tail && this.removeLink(this.tail); + } -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; -} + toArray() { + return [...this] + } -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = corkReq; - } else { - state.corkedRequestsFree = corkReq; - } -} + *[Symbol.iterator] () { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } -Object.defineProperty(Writable.prototype, 'destroyed', { - get: function () { - if (this._writableState === 
undefined) { - return false; - } - return this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; + remove (testFn) { + var curr = this.head; + while(curr) { + var {next} = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } } - // backward compatibility, the user is explicitly - // managing destroyed - this._writableState.destroyed = value; - } -}); + function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; + } -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; -Writable.prototype._destroy = function (err, cb) { - this.end(); - cb(err); -}; + function queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } -/***/ }), + var _worker = wrapAsync(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; -/***/ 52743: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + function on (event, handler) { + events[event].push(handler); + } -"use strict"; + function once (event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + function off (event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []) + if (!handler) return events[event] = [] + events[event] = events[event].filter(ev => ev !== handler); + } -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + function trigger (event, ...args) { + events[event].forEach(handler => handler(...args)); + } -var Buffer = __nccwpck_require__(95237).Buffer; -var util = __nccwpck_require__(31669); + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; -function copyBuffer(src, target, offset) { - src.copy(target, offset); -} + var res, rej; + function promiseCallback (err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res() + if (args.length <= 1) return res(args[0]) + res(args); + } -module.exports = function () { - function BufferList() { - _classCallCheck(this, BufferList); + var item = { + data, + callback: rejectOnError ? 
+ promiseCallback : + (callback || promiseCallback) + }; - this.head = null; - this.tail = null; - this.length = 0; - } + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } - BufferList.prototype.push = function push(v) { - var entry = { data: v, next: null }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - }; + if (!processingScheduled) { + processingScheduled = true; + setImmediate$1(() => { + processingScheduled = false; + q.process(); + }); + } - BufferList.prototype.unshift = function unshift(v) { - var entry = { data: v, next: this.head }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - }; + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }) + } + } - BufferList.prototype.shift = function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - }; + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; - BufferList.prototype.clear = function clear() { - this.head = this.tail = null; - this.length = 0; - }; + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; - BufferList.prototype.join = function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) { - ret += s + p.data; - }return ret; - }; + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } - BufferList.prototype.concat = function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - if (this.length === 1) return this.head.data; - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; - }; + task.callback(err, ...args); - return BufferList; -}(); + if (err != null) { + trigger('error', err, task.data); + } + } -if (util && util.inspect && util.inspect.custom) { - module.exports.prototype[util.inspect.custom] = function () { - var obj = util.inspect({ length: this.length }); - return this.constructor.name + ' ' + obj; - }; -} + if (numRunning <= (q.concurrency - q.buffer) ) { + trigger('unsaturated'); + } -/***/ }), + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } -/***/ 69147: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + setImmediate$1(() => trigger('drain')); + return true + } + return false + } -"use strict"; + const eventMethod = (name) => (handler) => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err) + resolve(data); + }); + }) + } + off(name); + on(name, handler); + }; -/**/ + var isProcessing = false; + var q = { + _tasks: new DLL(), + *[Symbol.iterator] () { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, false, callback)) + } + return _insert(data, false, false, callback); + }, + pushAsync (data, callback) { + if 
(Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, true, callback)) + } + return _insert(data, false, true, callback); + }, + kill () { + off(); + q._tasks.empty(); + }, + unshift (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, false, callback)) + } + return _insert(data, true, false, callback); + }, + unshiftAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, true, callback)) + } + return _insert(data, true, true, callback); + }, + remove (testFn) { + q._tasks.remove(testFn); + }, + process () { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). + if (isProcessing) { + return; + } + isProcessing = true; + while(!q.paused && numRunning < q.concurrency && q._tasks.length){ + var tasks = [], data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } -var pna = __nccwpck_require__(47810); -/**/ + numRunning += 1; -// undocumented cb() API, needed for core, not for public API -function destroy(err, cb) { - var _this = this; + if (q._tasks.length === 0) { + trigger('empty'); + } - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; + if (numRunning === q.concurrency) { + trigger('saturated'); + } - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { - pna.nextTick(emitErrorNT, this, err); + var cb = onlyOnce(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length () { + return q._tasks.length; + }, + running () { + return numRunning; + }, + workersList () { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause () { + q.paused = true; + }, + resume () { + if (q.paused === false) { return; } + q.paused = false; + setImmediate$1(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + }, + }); + return q; } - return this; - } - - // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks - - if (this._readableState) { - this._readableState.destroyed = true; - } - - // if this is a duplex stream mark the writable part as destroyed as well - if (this._writableState) { - this._writableState.destroyed = true; - } - this._destroy(err || null, function (err) { - if (!cb && err) { - pna.nextTick(emitErrorNT, _this, err); - if (_this._writableState) { - _this._writableState.errorEmitted = true; - } - } else if (cb) { - cb(err); + /** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). 
If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. + * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); + } + var reduce$1 = awaitify(reduce, 4); - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; - } -} + /** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. 
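+     * // (`req` in this example presumably refers to the `request` parameter of the
+     * // route handler above.)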
+ * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ + function seq(...functions) { + var _functions = functions.map(wrapAsync); + return function (...args) { + var that = this; -function emitErrorNT(self, err) { - self.emit('error', err); -} + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = promiseCallback(); + } -module.exports = { - destroy: destroy, - undestroy: undestroy -}; + reduce$1(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, + (err, results) => cb(err, ...results)); -/***/ }), + return cb[PROMISE_SYMBOL] + }; + } -/***/ 69777: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Creates a function which is a composition of the passed asynchronous + * functions. Each function consumes the return value of the function that + * follows. Composing functions `f()`, `g()`, and `h()` would produce the result + * of `f(g(h()))`, only this version uses callbacks to obtain the return values. + * + * If the last argument to the composed function is not a function, a promise + * is returned when you call it. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name compose + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} an asynchronous function that is the composed + * asynchronous `functions` + * @example + * + * function add1(n, callback) { + * setTimeout(function () { + * callback(null, n + 1); + * }, 10); + * } + * + * function mul3(n, callback) { + * setTimeout(function () { + * callback(null, n * 3); + * }, 10); + * } + * + * var add1mul3 = async.compose(mul3, add1); + * add1mul3(4, function (err, result) { + * // result now equals 15 + * }); + */ + function compose(...args) { + return seq(...args.reverse()); + } -module.exports = __nccwpck_require__(92413); + /** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). 
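+     * @example
+     *
+     * // Illustrative sketch added for clarity (not from the upstream async docs);
+     * // assumes `fs` and a list of file paths.
+     * async.mapLimit(['file1','file2','file3'], 2, function(file, callback) {
+     *     fs.stat(file, function(err, stat) {
+     *         callback(err, stat && stat.size);
+     *     });
+     * }, function(err, sizes) {
+     *     // at most two stat calls run at once; sizes preserves the input order
+     * });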
+ * @returns {Promise} a promise, if no callback is passed + */ + function mapLimit (coll, limit, iteratee, callback) { + return _asyncMap(eachOfLimit(limit), coll, iteratee, callback) + } + var mapLimit$1 = awaitify(mapLimit, 4); + /** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ + function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } -/***/ }), + return callback(err, result); + }); + } + var concatLimit$1 = awaitify(concatLimit, 4); -/***/ 64043: -/***/ ((module, exports, __nccwpck_require__) => { + /** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + * @example + * + * async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files) { + * // files is now a list of filenames that exist in the 3 directories + * }); + */ + function concat(coll, iteratee, callback) { + return concatLimit$1(coll, Infinity, iteratee, callback) + } + var concat$1 = awaitify(concat, 3); -var Stream = __nccwpck_require__(92413); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream; - exports = module.exports = Stream.Readable; - exports.Readable = Stream.Readable; - exports.Writable = Stream.Writable; - exports.Duplex = Stream.Duplex; - exports.Transform = Stream.Transform; - exports.PassThrough = Stream.PassThrough; - exports.Stream = Stream; -} else { - exports = module.exports = __nccwpck_require__(70661); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = __nccwpck_require__(7631); - exports.Duplex = __nccwpck_require__(92931); - exports.Transform = __nccwpck_require__(53937); - exports.PassThrough = __nccwpck_require__(61894); -} + /** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ + function concatSeries(coll, iteratee, callback) { + return concatLimit$1(coll, 1, iteratee, callback) + } + var concatSeries$1 = awaitify(concatSeries, 3); + /** + * Returns a function that when called, calls-back with the values provided. + * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. + * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... 
+ * }, callback); + */ + function constant(...args) { + return function (...ignoredArgs/*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; + } -/***/ }), + function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = wrapAsync(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); -/***/ 95237: -/***/ ((module, exports, __nccwpck_require__) => { + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, breakLoop); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; + } -/* eslint-disable node/no-deprecated-api */ -var buffer = __nccwpck_require__(64293) -var Buffer = buffer.Buffer + /** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] - } -} -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer -} + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns A Promise, if no callback is passed + * @example + * + * async.detect(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, result) { + * // result now equals the first file in the list that exists + * }); + */ + function detect(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) + } + var detect$1 = awaitify(detect, 3); -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) -} + /** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns a Promise if no callback is passed + */ + function detectLimit(coll, limit, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(limit), coll, iteratee, callback) + } + var detectLimit$1 = awaitify(detectLimit, 4); -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) + /** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns a Promise if no callback is passed + */ + function detectSeries(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(1), coll, iteratee, callback) + } -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') - } - return Buffer(arg, encodingOrOffset, length) -} + var detectSeries$1 = awaitify(detectSeries, 3); -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) - } else { - buf.fill(fill) + function consoleFunc(name) { + return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { + if (typeof console === 'object') { + if (err) { + if (console.error) { + console.error(err); + } + } else if (console[name]) { + resultArgs.forEach(x => console[name](x)); + } + } + }) } - } else { - buf.fill(0) - } - return buf -} -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return Buffer(size) -} + /** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. 
+ * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ + var dir = consoleFunc('dir'); -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return buffer.SlowBuffer(size) -} + /** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doWhilst(iteratee, test, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results; + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } -/***/ }), + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } -/***/ 63824: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return check(null, true); + } -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + var doWhilst$1 = awaitify(doWhilst, 3); + /** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doUntil(iteratee, test, callback) { + const _test = wrapAsync(test); + return doWhilst$1(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb (err, !truth)); + }, callback); + } + function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); + } -/**/ + /** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // assuming openFiles is an array of file names and saveFile is a function + * // to save the modified contents of that file: + * + * async.each(openFiles, saveFile, function(err){ + * // if any of the saves produced an error, err would equal that error + * }); + * + * // assuming openFiles is an array of file names + * async.each(openFiles, function(file, callback) { + * + * // Perform operation on file here. 
+ * console.log('Processing file ' + file); + * + * if( file.length > 32 ) { + * console.log('This file name is too long'); + * callback('File name too long'); + * } else { + * // Do work to process file here + * console.log('File processed'); + * callback(); + * } + * }, function(err) { + * // if any of the file processing produced an error, err would equal that error + * if( err ) { + * // One of the iterations produced an error. + * // All processing will now stop. + * console.log('A file failed to process'); + * } else { + * console.log('All files have been processed successfully'); + * } + * }); + */ + function eachLimit(coll, iteratee, callback) { + return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } -var Buffer = __nccwpck_require__(95237).Buffer; -/**/ + var each = awaitify(eachLimit, 3); -var isEncoding = Buffer.isEncoding || function (encoding) { - encoding = '' + encoding; - switch (encoding && encoding.toLowerCase()) { - case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': - return true; - default: - return false; - } -}; + /** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } + var eachLimit$2 = awaitify(eachLimit$1, 4); -function _normalizeEncoding(enc) { - if (!enc) return 'utf8'; - var retried; - while (true) { - switch (enc) { - case 'utf8': - case 'utf-8': - return 'utf8'; - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return 'utf16le'; - case 'latin1': - case 'binary': - return 'latin1'; - case 'base64': - case 'ascii': - case 'hex': - return enc; - default: - if (retried) return; // undefined - enc = ('' + enc).toLowerCase(); - retried = true; - } - } -}; - -// Do not cache `Buffer.isEncoding` when checking encoding names as some -// modules monkey-patch it to support additional encodings -function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); - return nenc || enc; -} - -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. 
-exports.s = StringDecoder; -function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case 'utf16le': - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case 'utf8': - this.fillLast = utf8FillLast; - nb = 4; - break; - case 'base64': - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; - } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer.allocUnsafe(nb); -} - -StringDecoder.prototype.write = function (buf) { - if (buf.length === 0) return ''; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === undefined) return ''; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; - } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ''; -}; - -StringDecoder.prototype.end = utf8End; - -// Returns only complete characters in a Buffer -StringDecoder.prototype.text = utf8Text; - -// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer -StringDecoder.prototype.fillLast = function (buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; -}; - -// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a -// continuation byte. If an invalid byte is detected, -2 is returned. -function utf8CheckByte(byte) { - if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; - return byte >> 6 === 0x02 ? -1 : -2; -} + /** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. -// Checks at most 3 bytes at the end of a Buffer in order to detect an -// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) -// needed to complete the UTF-8 character (if applicable) are returned. -function utf8CheckIncomplete(self, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 1; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 2; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
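+     * @example
+     *
+     * // Illustrative sketch added for clarity (not from the upstream async docs);
+     * // assumes `fs` and a list of file paths.
+     * async.eachSeries(['file1','file2','file3'], function(file, callback) {
+     *     fs.access(file, function(err) {
+     *         callback(err); // the next file is only checked after this one finishes
+     *     });
+     * }, function(err) {
+     *     // err is the first access error, if any
+     * });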
+ * @returns {Promise} a promise, if a callback is omitted + */ + function eachSeries(coll, iteratee, callback) { + return eachLimit$2(coll, 1, iteratee, callback) } - return nb; - } - return 0; -} + var eachSeries$1 = awaitify(eachSeries, 3); -// Validates as many continuation bytes for a multi-byte UTF-8 character as -// needed or are available. If we see a non-continuation byte where we expect -// one, we "replace" the validated continuation bytes we've seen so far with -// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding -// behavior. The continuation byte check is included three times in the case -// where all of the continuation bytes for a character exist in the same buffer. -// It is also done this way as a slight performance increase instead of using a -// loop. -function utf8CheckExtraBytes(self, buf, p) { - if ((buf[0] & 0xC0) !== 0x80) { - self.lastNeed = 0; - return '\ufffd'; - } - if (self.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 0xC0) !== 0x80) { - self.lastNeed = 1; - return '\ufffd'; - } - if (self.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 0xC0) !== 0x80) { - self.lastNeed = 2; - return '\ufffd'; - } + /** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ + function ensureAsync(fn) { + if (isAsync(fn)) return fn; + return function (...args/*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + setImmediate$1(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; } - } -} - -// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. -function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== undefined) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; -} - -// Returns all complete UTF-8 characters in a Buffer. 
If the Buffer ended on a -// partial character, the character's bytes are buffered until the required -// number of bytes are available. -function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString('utf8', i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString('utf8', i, end); -} -// For UTF-8, a replacement character is added when ending on a partial -// character. -function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + '\ufffd'; - return r; -} + /** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * async.every(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, result) { + * // if result is true then every file exists + * }); + */ + function every(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) + } + var every$1 = awaitify(every, 3); -// UTF-16LE typically needs two bytes per character, but even if we have an even -// number of bytes available, we need to check if we end on a leading/high -// surrogate. In that case, we need to wait for the next two bytes in order to -// decode the last character properly. -function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString('utf16le', i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 0xD800 && c <= 0xDBFF) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } + /** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). 
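+     * @example
+     *
+     * // Illustrative sketch added for clarity (not from the upstream async docs);
+     * // the same check as the `every` example above, but at most 2 at a time.
+     * async.everyLimit(['file1','file2','file3'], 2, function(filePath, callback) {
+     *     fs.access(filePath, function(err) {
+     *         callback(null, !err);
+     *     });
+     * }, function(err, result) {
+     *     // result is true only if every file exists
+     * });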
+ * @returns {Promise} a promise, if no callback provided + */ + function everyLimit(coll, limit, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfLimit(limit), coll, iteratee, callback) } - return r; - } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString('utf16le', i, buf.length - 1); -} + var everyLimit$1 = awaitify(everyLimit, 4); -// For UTF-16LE we do not explicitly append special replacement characters if we -// end on a partial character, we simply let v8 handle that. -function utf16End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString('utf16le', 0, end); - } - return r; -} + /** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function everySeries(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) + } + var everySeries$1 = awaitify(everySeries, 3); -function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString('base64', i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; - } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - } - return buf.toString('base64', i, buf.length - n); -} + function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); + } -function base64End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); - return r; -} + function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({index, value: x}); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results + .sort((a, b) => a.index - b.index) + .map(v => v.value)); + }); + } -// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) -function simpleWrite(buf) { - return buf.toString(this.encoding); -} + function _filter(eachfn, coll, iteratee, callback) { + var filter = isArrayLike(coll) ? 
filterArray : filterGeneric; + return filter(eachfn, coll, wrapAsync(iteratee), callback); + } -function simpleEnd(buf) { - return buf && buf.length ? this.write(buf) : ''; -} + /** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + * @example + * + * async.filter(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, results) { + * // results now equals an array of the existing files + * }); + */ + function filter (coll, iteratee, callback) { + return _filter(eachOf$1, coll, iteratee, callback) + } + var filter$1 = awaitify(filter, 3); -/***/ }), + /** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ + function filterLimit (coll, limit, iteratee, callback) { + return _filter(eachOfLimit(limit), coll, iteratee, callback) + } + var filterLimit$1 = awaitify(filterLimit, 4); -/***/ 10165: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ + function filterSeries (coll, iteratee, callback) { + return _filter(eachOfSeries$1, coll, iteratee, callback) + } + var filterSeries$1 = awaitify(filterSeries, 3); -"use strict"; + /** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. -var EventEmitter = __nccwpck_require__(28614).EventEmitter -var util = __nccwpck_require__(31669) + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). + * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). + * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ + function forever(fn, errback) { + var done = onlyOnce(errback); + var task = wrapAsync(ensureAsync(fn)); -var trackerId = 0 -var TrackerBase = module.exports = function (name) { - EventEmitter.call(this) - this.id = ++trackerId - this.name = name -} -util.inherits(TrackerBase, EventEmitter) + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); + } + var forever$1 = awaitify(forever, 2); + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. 
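
// A minimal usage sketch of async.groupByLimit as documented above: each value is
// grouped under the key its iteratee completes with, with at most two iteratees
// in flight. Assumes the standalone `async` package; the sample data is hypothetical.
const async = require('async');

async.groupByLimit(['ant', 'bee', 'horse', 'mouse'], 2, (word, callback) => {
  // the grouping key here is simply the word length
  setImmediate(() => callback(null, word.length));
}, (err, grouped) => {
  if (err) return console.error(err);
  console.log(grouped); // e.g. { '3': ['ant', 'bee'], '5': ['horse', 'mouse'] }
});
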
+ * @returns {Promise} a promise, if no callback is passed + */ + function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, {key, val}); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var {hasOwnProperty} = Object.prototype; -/***/ }), + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var {key} = mapResults[i]; + var {val} = mapResults[i]; -/***/ 60660: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } -"use strict"; + return callback(err, result); + }); + } -var util = __nccwpck_require__(31669) -var TrackerBase = __nccwpck_require__(10165) -var Tracker = __nccwpck_require__(8074) -var TrackerStream = __nccwpck_require__(31375) + var groupByLimit$1 = awaitify(groupByLimit, 4); -var TrackerGroup = module.exports = function (name) { - TrackerBase.call(this, name) - this.parentGroup = null - this.trackers = [] - this.completion = {} - this.weight = {} - this.totalWeight = 0 - this.finished = false - this.bubbleChange = bubbleChange(this) -} -util.inherits(TrackerGroup, TrackerBase) + /** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * async.groupBy(['userId1', 'userId2', 'userId3'], function(userId, callback) { + * db.findById(userId, function(err, user) { + * if (err) return callback(err); + * return callback(null, user.age); + * }); + * }, function(err, result) { + * // result is object containing the userIds grouped by age + * // e.g. 
{ 30: ['userId1', 'userId3'], 42: ['userId2']}; + * }); + */ + function groupBy (coll, iteratee, callback) { + return groupByLimit$1(coll, Infinity, iteratee, callback) + } -function bubbleChange (trackerGroup) { - return function (name, completed, tracker) { - trackerGroup.completion[tracker.id] = completed - if (trackerGroup.finished) return - trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) - } -} + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupBySeries (coll, iteratee, callback) { + return groupByLimit$1(coll, 1, iteratee, callback) + } -TrackerGroup.prototype.nameInTree = function () { - var names = [] - var from = this - while (from) { - names.unshift(from.name) - from = from.parentGroup - } - return names.join('/') -} + /** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. + * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ + var log = consoleFunc('log'); -TrackerGroup.prototype.addUnit = function (unit, weight) { - if (unit.addUnit) { - var toTest = this - while (toTest) { - if (unit === toTest) { - throw new Error( - 'Attempted to add tracker group ' + - unit.name + ' to tree that already includes it ' + - this.nameInTree(this)) - } - toTest = toTest.parentGroup + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). 
+ * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesLimit(obj, limit, iteratee, callback) { + callback = once(callback); + var newObj = {}; + var _iteratee = wrapAsync(iteratee); + return eachOfLimit(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); } - unit.parentGroup = this - } - this.weight[unit.id] = weight || 1 - this.totalWeight += this.weight[unit.id] - this.trackers.push(unit) - this.completion[unit.id] = unit.completed() - unit.on('change', this.bubbleChange) - if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit) - return unit -} -TrackerGroup.prototype.completed = function () { - if (this.trackers.length === 0) return 0 - var valPerWeight = 1 / this.totalWeight - var completed = 0 - for (var ii = 0; ii < this.trackers.length; ii++) { - var trackerId = this.trackers[ii].id - completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId] - } - return completed -} + var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); -TrackerGroup.prototype.newGroup = function (name, weight) { - return this.addUnit(new TrackerGroup(name), weight) -} + /** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * async.mapValues({ + * f1: 'file1', + * f2: 'file2', + * f3: 'file3' + * }, function (file, key, callback) { + * fs.stat(file, callback); + * }, function(err, result) { + * // result is now a map of stats for each file, e.g. 
+ * // { + * // f1: [stats for file1], + * // f2: [stats for file2], + * // f3: [stats for file3] + * // } + * }); + */ + function mapValues(obj, iteratee, callback) { + return mapValuesLimit$1(obj, Infinity, iteratee, callback) + } -TrackerGroup.prototype.newItem = function (name, todo, weight) { - return this.addUnit(new Tracker(name, todo), weight) -} + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesSeries(obj, iteratee, callback) { + return mapValuesLimit$1(obj, 1, iteratee, callback) + } -TrackerGroup.prototype.newStream = function (name, todo, weight) { - return this.addUnit(new TrackerStream(name, todo), weight) -} + /** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. 
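
// A minimal sketch of async.memoize with a custom hash function, as described above:
// the hasher receives every argument except the callback and must be synchronous.
// Assumes the standalone `async` package; lookupUser and its arguments are hypothetical.
const async = require('async');

function lookupUser(id, opts, callback) {
  // pretend this hits a database
  setImmediate(() => callback(null, { id, verbose: !!opts.verbose }));
}

// hash on both arguments, since the first argument alone is not distinct enough
const cachedLookup = async.memoize(lookupUser, (id, opts) => `${id}:${!!opts.verbose}`);

cachedLookup(42, { verbose: true }, (err, user) => {
  // a second call with the same arguments is now served from the cache
  cachedLookup(42, { verbose: true }, (err2, cached) => {
    console.log(user, cached);
    console.log(Object.keys(cachedLookup.memo)); // [ '42:true' ]
  });
});
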
+ * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ + function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = wrapAsync(fn); + var memoized = initialParams((args, callback) => { + var key = hasher(...args); + if (key in memo) { + setImmediate$1(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; + } -TrackerGroup.prototype.finish = function () { - this.finished = true - if (!this.trackers.length) this.addUnit(new Tracker(), 1, true) - for (var ii = 0; ii < this.trackers.length; ii++) { - var tracker = this.trackers[ii] - tracker.finish() - tracker.removeListener('change', this.bubbleChange) - } - this.emit('change', this.name, 1, this) -} + /** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ + var _defer$1; -var buffer = ' ' -TrackerGroup.prototype.debug = function (depth) { - depth = depth || 0 - var indent = depth ? buffer.substr(0, depth) : '' - var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' - this.trackers.forEach(function (tracker) { - if (tracker instanceof TrackerGroup) { - output += tracker.debug(depth + 1) + if (hasNextTick) { + _defer$1 = process.nextTick; + } else if (hasSetImmediate) { + _defer$1 = setImmediate; } else { - output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' + _defer$1 = fallback; } - }) - return output -} - - -/***/ }), -/***/ 31375: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var nextTick = wrap(_defer$1); -"use strict"; + var parallel = awaitify((eachfn, tasks, callback) => { + var results = isArrayLike(tasks) ? 
[] : {}; -var util = __nccwpck_require__(31669) -var stream = __nccwpck_require__(64043) -var delegate = __nccwpck_require__(61318) -var Tracker = __nccwpck_require__(8074) - -var TrackerStream = module.exports = function (name, size, options) { - stream.Transform.call(this, options) - this.tracker = new Tracker(name, size) - this.name = name - this.id = this.tracker.id - this.tracker.on('change', delegateChange(this)) -} -util.inherits(TrackerStream, stream.Transform) + eachfn(tasks, (task, key, taskCb) => { + wrapAsync(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); + }, 3); -function delegateChange (trackerStream) { - return function (name, completion, tracker) { - trackerStream.emit('change', name, completion, trackerStream) - } -} + /** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // optional callback + * function(err, results) { + * // the results array will equal ['one','two'] even though + * // the second function had a shorter timeout. 
+ * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * // results is now equals to: {one: 1, two: 2} + * }); + */ + function parallel$1(tasks, callback) { + return parallel(eachOf$1, tasks, callback); + } -TrackerStream.prototype._transform = function (data, encoding, cb) { - this.tracker.completeWork(data.length ? data.length : 1) - this.push(data) - cb() -} + /** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + */ + function parallelLimit(tasks, limit, callback) { + return parallel(eachOfLimit(limit), tasks, callback); + } -TrackerStream.prototype._flush = function (cb) { - this.tracker.finish() - cb() -} + /** + * A queue of tasks for the worker function to complete. + * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. + * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object + * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`, + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. 
+ * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. + * @property {AsyncFunction} unshirtAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. + * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. + * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. + * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error. + * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. + * + * @example + * const q = aync.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } + * + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() + */ -delegate(TrackerStream.prototype, 'tracker') - .method('completed') - .method('addWork') - .method('finish') + /** + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. 
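
// A minimal sketch of the promise-returning queue methods documented in the
// QueueObject typedef above (pushAsync, and drain() called with no callback).
// Assumes the standalone `async` package; the worker below is hypothetical.
const async = require('async');

const q = async.queue((task, callback) => {
  setTimeout(() => callback(null, task.toUpperCase()), 50);
}, 2);

(async () => {
  await q.pushAsync('hello'); // resolves when the worker finishes; rejects on error
  q.push(['a', 'b', 'c']);    // push also accepts an array of tasks
  await q.drain();            // resolves once the last task has returned from the worker
  console.log('all done');
})();
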
+ * + * @name queue + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. + * @example + * + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); + * + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() + * + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); + * + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * // callback is optional + * q.push({name: 'bar'}); + * + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); + * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) { + * console.log('finished processing bar'); + * }); + */ + function queue$1 (worker, concurrency) { + var _worker = wrapAsync(worker); + return queue((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); + } + // Binary min-heap implementation used for priority queue. + // Implementation is stable, i.e. push time is considered for equal priorities + class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } -/***/ }), + get length() { + return this.heap.length; + } -/***/ 8074: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + empty () { + this.heap = []; + return this; + } -"use strict"; + percUp(index) { + let p; -var util = __nccwpck_require__(31669) -var TrackerBase = __nccwpck_require__(10165) + while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; -var Tracker = module.exports = function (name, todo) { - TrackerBase.call(this, name) - this.workDone = 0 - this.workTodo = todo || 0 -} -util.inherits(Tracker, TrackerBase) + index = p; + } + } -Tracker.prototype.completed = function () { - return this.workTodo === 0 ? 
0 : this.workDone / this.workTodo -} + percDown(index) { + let l; -Tracker.prototype.addWork = function (work) { - this.workTodo += work - this.emit('change', this.name, this.completed(), this) -} + while ((l=leftChi(index)) < this.heap.length) { + if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { + l = l+1; + } -Tracker.prototype.completeWork = function (work) { - this.workDone += work - if (this.workDone > this.workTodo) this.workDone = this.workTodo - this.emit('change', this.name, this.completed(), this) -} + if (smaller(this.heap[index], this.heap[l])) { + break; + } -Tracker.prototype.finish = function () { - this.workTodo = this.workDone = 1 - this.emit('change', this.name, 1, this) -} + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + index = l; + } + } -/***/ }), + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length-1); + } -/***/ 99348: -/***/ ((module) => { + unshift(node) { + return this.heap.push(node); + } -// Copyright 2011 Mark Cavage All rights reserved. + shift() { + let [top] = this.heap; + this.heap[0] = this.heap[this.heap.length-1]; + this.heap.pop(); + this.percDown(0); -module.exports = { + return top; + } - newInvalidAsn1Error: function (msg) { - var e = new Error(); - e.name = 'InvalidAsn1Error'; - e.message = msg || ''; - return e; - } + toArray() { + return [...this]; + } -}; + *[Symbol.iterator] () { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + remove (testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } -/***/ }), + this.heap.splice(j); -/***/ 194: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + for (let i = parent(this.heap.length-1); i >= 0; i--) { + this.percDown(i); + } -// Copyright 2011 Mark Cavage All rights reserved. + return this; + } + } -var errors = __nccwpck_require__(99348); -var types = __nccwpck_require__(42473); + function leftChi(i) { + return (i<<1)+1; + } -var Reader = __nccwpck_require__(20290); -var Writer = __nccwpck_require__(43200); + function parent(i) { + return ((i+1)>>1)-1; + } + function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } + else { + return x.pushCount < y.pushCount; + } + } -// --- Exports + /** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. + * + * @name priorityQueue + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are two + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. + * * The `unshift` method was removed. 
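
// A minimal usage sketch of async.priorityQueue as described above: push takes a
// priority argument and tasks complete in ascending priority order. Assumes the
// standalone `async` package; the task payloads are hypothetical.
const async = require('async');

// with concurrency 1, the priority-1 task is processed before the priority-5 task
const pq = async.priorityQueue((task, callback) => {
  console.log('processing', task.name);
  callback();
}, 1);

pq.push({ name: 'routine job' }, 5);
pq.push({ name: 'urgent job' }, 1, (err) => {
  if (err) console.error(err);
});
// per the docs above, priorityQueue exposes no unshift()
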
+ */ + function priorityQueue(worker, concurrency) { + // Start with a normal queue + var q = queue$1(worker, concurrency); -module.exports = { + q._tasks = new Heap(); - Reader: Reader, + // Override push to accept second parameter representing priority + q.push = function(data, priority = 0, callback = () => {}) { + if (typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + if (!Array.isArray(data)) { + data = [data]; + } + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + return setImmediate$1(() => q.drain()); + } - Writer: Writer + for (var i = 0, l = data.length; i < l; i++) { + var item = { + data: data[i], + priority, + callback + }; -}; + q._tasks.push(item); + } -for (var t in types) { - if (types.hasOwnProperty(t)) - module.exports[t] = types[t]; -} -for (var e in errors) { - if (errors.hasOwnProperty(e)) - module.exports[e] = errors[e]; -} + setImmediate$1(q.process); + }; + // Remove unshift function + delete q.unshift; -/***/ }), + return q; + } -/***/ 20290: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). + * @returns undefined + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ + function race(tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + wrapAsync(tasks[i])(callback); + } + } -// Copyright 2011 Mark Cavage All rights reserved. + var race$1 = awaitify(race, 2); -var assert = __nccwpck_require__(42357); -var Buffer = __nccwpck_require__(15118).Buffer; + /** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. + * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee complete with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. 
+ * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function reduceRight (array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return reduce$1(reversed, memo, iteratee, callback); + } -var ASN1 = __nccwpck_require__(42473); -var errors = __nccwpck_require__(99348); + /** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ + function reflect(fn) { + var _fn = wrapAsync(fn); + return initialParams(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0){ + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + return _fn.apply(this, args); + }); + } -// --- Globals + /** + * A helper function that wraps an array or an object of functions with `reflect`. + * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... 
+ * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ + function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(reflect); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = reflect.call(this, tasks[key]); + }); + } + return results; + } -var newInvalidAsn1Error = errors.newInvalidAsn1Error; + function reject(eachfn, arr, _iteratee, callback) { + const iteratee = wrapAsync(_iteratee); + return _filter(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); + }); + }, callback); + } + /** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * async.reject(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, results) { + * // results now equals an array of missing files + * createFiles(results); + * }); + */ + function reject$1 (coll, iteratee, callback) { + return reject(eachOf$1, coll, iteratee, callback) + } + var reject$2 = awaitify(reject$1, 3); + /** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
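
// A minimal usage sketch of async.rejectLimit as documented above: values whose
// truth test passes are removed, with at most two tests running at a time.
// Assumes the standalone `async` package; the file names are hypothetical.
const async = require('async');
const fs = require('fs');

async.rejectLimit(['file1', 'file2', 'file3'], 2, (filePath, callback) => {
  fs.access(filePath, (err) => callback(null, !err));
}, (err, missingFiles) => {
  if (err) return console.error(err);
  console.log(missingFiles); // the paths that do NOT exist
});
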
+ * @returns {Promise} a promise, if no callback is passed + */ + function rejectLimit (coll, limit, iteratee, callback) { + return reject(eachOfLimit(limit), coll, iteratee, callback) + } + var rejectLimit$1 = awaitify(rejectLimit, 4); -// --- API + /** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function rejectSeries (coll, iteratee, callback) { + return reject(eachOfSeries$1, coll, iteratee, callback) + } + var rejectSeries$1 = awaitify(rejectSeries, 3); -function Reader(data) { - if (!data || !Buffer.isBuffer(data)) - throw new TypeError('data must be a node Buffer'); + function constant$1(value) { + return function () { + return value; + } + } - this._buf = data; - this._size = data.length; + /** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). 
+ * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ + const DEFAULT_TIMES = 5; + const DEFAULT_INTERVAL = 0; - // These hold the "current" state - this._len = 0; - this._offset = 0; -} + function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant$1(DEFAULT_INTERVAL) + }; -Object.defineProperty(Reader.prototype, 'length', { - enumerable: true, - get: function () { return (this._len); } -}); + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || promiseCallback(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || promiseCallback(); + } -Object.defineProperty(Reader.prototype, 'offset', { - enumerable: true, - get: function () { return (this._offset); } -}); + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } -Object.defineProperty(Reader.prototype, 'remain', { - get: function () { return (this._size - this._offset); } -}); + var _task = wrapAsync(task); -Object.defineProperty(Reader.prototype, 'buffer', { - get: function () { return (this._buf.slice(this._offset)); } -}); + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return + if (err && attempt++ < options.times && + (typeof options.errorFilter != 'function' || + options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); + } + retryAttempt(); + return callback[PROMISE_SYMBOL] + } -/** - * Reads a single byte and advances offset; you can pass in `true` to make this - * a "peek" operation (i.e., get the byte, but don't advance the offset). - * - * @param {Boolean} peek true means don't move offset. - * @return {Number} the next byte, null if not enough data. 
- */ -Reader.prototype.readByte = function (peek) { - if (this._size - this._offset < 1) - return null; + function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; - var b = this._buf[this._offset] & 0xff; + acc.intervalFunc = typeof t.interval === 'function' ? + t.interval : + constant$1(+t.interval || DEFAULT_INTERVAL); - if (!peek) - this._offset += 1; + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } + } - return b; -}; + /** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. + * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ + function retryable (opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = (opts && opts.arity) || task.length; + if (isAsync(task)) { + arity += 1; + } + var _task = wrapAsync(task); + return initialParams((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = promiseCallback(); + } + function taskFn(cb) { + _task(...args, cb); + } + if (opts) retry(opts, taskFn, callback); + else retry(taskFn, callback); -Reader.prototype.peek = function () { - return this.readByte(true); -}; + return callback[PROMISE_SYMBOL] + }); + } + /** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. 
+ * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). + * @return {Promise} a promise, if no callback is passed + * @example + * async.series([ + * function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }, + * function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * } + * ], + * // optional callback + * function(err, results) { + * // results is now equal to ['one', 'two'] + * }); + * + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback){ + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * // results is now equal to: {one: 1, two: 2} + * }); + */ + function series(tasks, callback) { + return parallel(eachOfSeries$1, tasks, callback); + } -/** - * Reads a (potentially) variable length off the BER buffer. This call is - * not really meant to be called directly, as callers have to manipulate - * the internal buffer afterwards. - * - * As a result of this call, you can call `Reader.length`, until the - * next thing called that does a readLength. - * - * @return {Number} the amount of offset to advance the buffer. - * @throws {InvalidAsn1Error} on bad ASN.1 - */ -Reader.prototype.readLength = function (offset) { - if (offset === undefined) - offset = this._offset; + /** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * async.some(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, result) { + * // if result is true then at least one of the files exists + * }); + */ + function some(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) + } + var some$1 = awaitify(some, 3); - if (offset >= this._size) - return null; + /** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. 
+ * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function someLimit(coll, limit, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfLimit(limit), coll, iteratee, callback) + } + var someLimit$1 = awaitify(someLimit, 4); - var lenB = this._buf[offset++] & 0xff; - if (lenB === null) - return null; + /** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function someSeries(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) + } + var someSeries$1 = awaitify(someSeries, 3); - if ((lenB & 0x80) === 0x80) { - lenB &= 0x7f; + /** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback passed + * @example + * + * async.sortBy(['file1','file2','file3'], function(file, callback) { + * fs.stat(file, function(err, stats) { + * callback(err, stats.mtime); + * }); + * }, function(err, results) { + * // results is now the original array of files sorted by + * // modified date + * }); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy([1,9,3,5], function(x, callback) { + * callback(null, x); + * }, function(err,result) { + * // result callback + * }); + * + * // descending order + * async.sortBy([1,9,3,5], function(x, callback) { + * callback(null, x*-1); //<- x*-1 instead of x, turns the order around + * }, function(err,result) { + * // result callback + * }); + */ + function sortBy (coll, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return map$1(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, {value: x, criteria}); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); - if (lenB === 0) - throw newInvalidAsn1Error('Indefinite length not supported'); + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 1 : 0; + } + } + var sortBy$1 = awaitify(sortBy, 3); - if (lenB > 4) - throw newInvalidAsn1Error('encoding too long'); + /** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... 
+ * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ + function timeout(asyncFn, milliseconds, info) { + var fn = wrapAsync(asyncFn); - if (this._size - offset < lenB) - return null; + return initialParams((args, callback) => { + var timedOut = false; + var timer; - this._len = 0; - for (var i = 0; i < lenB; i++) - this._len = (this._len << 8) + (this._buf[offset++] & 0xff); + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } - } else { - // Wasn't a variable length - this._len = lenB; - } + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); - return offset; -}; + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); + } + function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; + } -/** - * Parses the next sequence in this BER buffer. - * - * To get the length of the sequence, call `Reader.length`. - * - * @return {Number} the sequence's tag. - */ -Reader.prototype.readSequence = function (tag) { - var seq = this.peek(); - if (seq === null) - return null; - if (tag !== undefined && tag !== seq) - throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + - ': got 0x' + seq.toString(16)); + /** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ + function timesLimit(count, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(range(count), limit, _iteratee, callback); + } - var o = this.readLength(this._offset + 1); // stored in `length` - if (o === null) - return null; + /** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. 
+ * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ + function times (n, iteratee, callback) { + return timesLimit(n, Infinity, iteratee, callback) + } - this._offset = o; - return seq; -}; + /** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ + function timesSeries (n, iteratee, callback) { + return timesLimit(n, 1, iteratee, callback) + } + /** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * async.transform([1,2,3], function(acc, item, index, callback) { + * // pointless async: + * process.nextTick(function() { + * acc[index] = item * 2 + * callback(null) + * }); + * }, function(err, result) { + * // result is now equal to [2, 4, 6] + * }); + * + * @example + * + * async.transform({a: 1, b: 2, c: 3}, function (obj, val, key, callback) { + * setImmediate(function () { + * obj[key] = val * 2; + * callback(); + * }) + * }, function (err, result) { + * // result is equal to {a: 2, b: 4, c: 6} + * }) + */ + function transform (coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? [] : {}; + } + callback = once(callback || promiseCallback()); + var _iteratee = wrapAsync(iteratee); -Reader.prototype.readInt = function () { - return this._readTag(ASN1.Integer); -}; + eachOf$1(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[PROMISE_SYMBOL] + } + /** + * It runs each task in series but stops whenever any of the functions were + * successful. 
If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ + function tryEach(tasks, callback) { + var error = null; + var result; + return eachSeries$1(tasks, (task, taskCb) => { + wrapAsync(task)((err, ...args) => { + if (err === false) return taskCb(err); -Reader.prototype.readBoolean = function () { - return (this._readTag(ASN1.Boolean) === 0 ? false : true); -}; + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); + } + var tryEach$1 = awaitify(tryEach); -Reader.prototype.readEnumeration = function () { - return this._readTag(ASN1.Enumeration); -}; + /** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. + * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ + function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; + } + /** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ + function whilst(test, iteratee, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results = []; -Reader.prototype.readString = function (tag, retbuf) { - if (!tag) - tag = ASN1.OctetString; + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } - var b = this.peek(); - if (b === null) - return null; + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } - if (b !== tag) - throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + - ': got 0x' + b.toString(16)); + return _test(check); + } + var whilst$1 = awaitify(whilst, 3); - var o = this.readLength(this._offset + 1); // stored in `length` + /** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(page, cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ + function until(test, iteratee, callback) { + const _test = wrapAsync(test); + return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); + } - if (o === null) - return null; + /** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. 
+ * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). + * @returns undefined + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ + function waterfall (tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; - if (this.length > this._size - o) - return null; + function nextTask(args) { + var task = wrapAsync(tasks[taskIndex++]); + task(...args, onlyOnce(next)); + } - this._offset = o; + function next(err, ...args) { + if (err === false) return + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } - if (this.length === 0) - return retbuf ? Buffer.alloc(0) : ''; + nextTask([]); + } - var str = this._buf.slice(this._offset, this._offset + this.length); - this._offset += this.length; + var waterfall$1 = awaitify(waterfall); - return retbuf ? str : str.toString('utf8'); -}; + /** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. 
a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. + * + * @typedef {Function} AsyncFunction + * @static + */ -Reader.prototype.readOID = function (tag) { - if (!tag) - tag = ASN1.OID; + var index = { + apply, + applyEach: applyEach$1, + applyEachSeries, + asyncify, + auto, + autoInject, + cargo, + cargoQueue: cargo$1, + compose, + concat: concat$1, + concatLimit: concatLimit$1, + concatSeries: concatSeries$1, + constant, + detect: detect$1, + detectLimit: detectLimit$1, + detectSeries: detectSeries$1, + dir, + doUntil, + doWhilst: doWhilst$1, + each, + eachLimit: eachLimit$2, + eachOf: eachOf$1, + eachOfLimit: eachOfLimit$2, + eachOfSeries: eachOfSeries$1, + eachSeries: eachSeries$1, + ensureAsync, + every: every$1, + everyLimit: everyLimit$1, + everySeries: everySeries$1, + filter: filter$1, + filterLimit: filterLimit$1, + filterSeries: filterSeries$1, + forever: forever$1, + groupBy, + groupByLimit: groupByLimit$1, + groupBySeries, + log, + map: map$1, + mapLimit: mapLimit$1, + mapSeries: mapSeries$1, + mapValues, + mapValuesLimit: mapValuesLimit$1, + mapValuesSeries, + memoize, + nextTick, + parallel: parallel$1, + parallelLimit, + priorityQueue, + queue: queue$1, + race: race$1, + reduce: reduce$1, + reduceRight, + reflect, + reflectAll, + reject: reject$2, + rejectLimit: rejectLimit$1, + rejectSeries: rejectSeries$1, + retry, + retryable, + seq, + series, + setImmediate: setImmediate$1, + some: some$1, + someLimit: someLimit$1, + someSeries: someSeries$1, + sortBy: sortBy$1, + timeout, + times, + timesLimit, + timesSeries, + transform, + tryEach: tryEach$1, + unmemoize, + until, + waterfall: waterfall$1, + whilst: whilst$1, - var b = this.readString(tag, true); - if (b === null) - return null; + // aliases + all: every$1, + allLimit: everyLimit$1, + allSeries: everySeries$1, + any: some$1, + anyLimit: someLimit$1, + anySeries: someSeries$1, + find: detect$1, + findLimit: detectLimit$1, + findSeries: detectSeries$1, + flatMap: concat$1, + flatMapLimit: concatLimit$1, + flatMapSeries: concatSeries$1, + forEach: each, + forEachSeries: eachSeries$1, + forEachLimit: eachLimit$2, + forEachOf: eachOf$1, + forEachOfSeries: eachOfSeries$1, + forEachOfLimit: eachOfLimit$2, + inject: reduce$1, + foldl: reduce$1, + foldr: reduceRight, + select: filter$1, + selectLimit: filterLimit$1, + selectSeries: filterSeries$1, + wrapSync: asyncify, + during: whilst$1, + doDuring: doWhilst$1 + }; - var values = []; - var value = 0; + exports.default = index; + exports.apply = apply; + exports.applyEach = applyEach$1; + exports.applyEachSeries = applyEachSeries; + exports.asyncify = asyncify; + exports.auto = auto; + exports.autoInject = autoInject; + exports.cargo = cargo; + exports.cargoQueue = cargo$1; + exports.compose = compose; + exports.concat = concat$1; + exports.concatLimit = concatLimit$1; + exports.concatSeries = concatSeries$1; + exports.constant = constant; + exports.detect = detect$1; + exports.detectLimit 
= detectLimit$1; + exports.detectSeries = detectSeries$1; + exports.dir = dir; + exports.doUntil = doUntil; + exports.doWhilst = doWhilst$1; + exports.each = each; + exports.eachLimit = eachLimit$2; + exports.eachOf = eachOf$1; + exports.eachOfLimit = eachOfLimit$2; + exports.eachOfSeries = eachOfSeries$1; + exports.eachSeries = eachSeries$1; + exports.ensureAsync = ensureAsync; + exports.every = every$1; + exports.everyLimit = everyLimit$1; + exports.everySeries = everySeries$1; + exports.filter = filter$1; + exports.filterLimit = filterLimit$1; + exports.filterSeries = filterSeries$1; + exports.forever = forever$1; + exports.groupBy = groupBy; + exports.groupByLimit = groupByLimit$1; + exports.groupBySeries = groupBySeries; + exports.log = log; + exports.map = map$1; + exports.mapLimit = mapLimit$1; + exports.mapSeries = mapSeries$1; + exports.mapValues = mapValues; + exports.mapValuesLimit = mapValuesLimit$1; + exports.mapValuesSeries = mapValuesSeries; + exports.memoize = memoize; + exports.nextTick = nextTick; + exports.parallel = parallel$1; + exports.parallelLimit = parallelLimit; + exports.priorityQueue = priorityQueue; + exports.queue = queue$1; + exports.race = race$1; + exports.reduce = reduce$1; + exports.reduceRight = reduceRight; + exports.reflect = reflect; + exports.reflectAll = reflectAll; + exports.reject = reject$2; + exports.rejectLimit = rejectLimit$1; + exports.rejectSeries = rejectSeries$1; + exports.retry = retry; + exports.retryable = retryable; + exports.seq = seq; + exports.series = series; + exports.setImmediate = setImmediate$1; + exports.some = some$1; + exports.someLimit = someLimit$1; + exports.someSeries = someSeries$1; + exports.sortBy = sortBy$1; + exports.timeout = timeout; + exports.times = times; + exports.timesLimit = timesLimit; + exports.timesSeries = timesSeries; + exports.transform = transform; + exports.tryEach = tryEach$1; + exports.unmemoize = unmemoize; + exports.until = until; + exports.waterfall = waterfall$1; + exports.whilst = whilst$1; + exports.all = every$1; + exports.allLimit = everyLimit$1; + exports.allSeries = everySeries$1; + exports.any = some$1; + exports.anyLimit = someLimit$1; + exports.anySeries = someSeries$1; + exports.find = detect$1; + exports.findLimit = detectLimit$1; + exports.findSeries = detectSeries$1; + exports.flatMap = concat$1; + exports.flatMapLimit = concatLimit$1; + exports.flatMapSeries = concatSeries$1; + exports.forEach = each; + exports.forEachSeries = eachSeries$1; + exports.forEachLimit = eachLimit$2; + exports.forEachOf = eachOf$1; + exports.forEachOfSeries = eachOfSeries$1; + exports.forEachOfLimit = eachOfLimit$2; + exports.inject = reduce$1; + exports.foldl = reduce$1; + exports.foldr = reduceRight; + exports.select = filter$1; + exports.selectLimit = filterLimit$1; + exports.selectSeries = filterSeries$1; + exports.wrapSync = asyncify; + exports.during = whilst$1; + exports.doDuring = doWhilst$1; - for (var i = 0; i < b.length; i++) { - var byte = b[i] & 0xff; + Object.defineProperty(exports, '__esModule', { value: true }); - value <<= 7; - value += byte & 0x7f; - if ((byte & 0x80) === 0) { - values.push(value); - value = 0; - } - } +}))); - value = values.shift(); - values.unshift(value % 40); - values.unshift((value / 40) >> 0); - - return values.join('.'); -}; +/***/ }), -Reader.prototype._readTag = function (tag) { - assert.ok(tag !== undefined); +/***/ 14812: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var b = this.peek(); +module.exports = +{ + parallel : 
__nccwpck_require__(8210), + serial : __nccwpck_require__(50445), + serialOrdered : __nccwpck_require__(3578) +}; - if (b === null) - return null; - if (b !== tag) - throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + - ': got 0x' + b.toString(16)); +/***/ }), - var o = this.readLength(this._offset + 1); // stored in `length` - if (o === null) - return null; +/***/ 1700: +/***/ ((module) => { - if (this.length > 4) - throw newInvalidAsn1Error('Integer too long: ' + this.length); +// API +module.exports = abort; - if (this.length > this._size - o) - return null; - this._offset = o; +/** + * Aborts leftover active jobs + * + * @param {object} state - current state object + */ +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); - var fb = this._buf[this._offset]; - var value = 0; + // reset leftover jobs + state.jobs = {}; +} - for (var i = 0; i < this.length; i++) { - value <<= 8; - value |= (this._buf[this._offset++] & 0xff); +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); } +} - if ((fb & 0x80) === 0x80 && i !== 4) - value -= (1 << (i * 8)); - return value >> 0; -}; +/***/ }), +/***/ 72794: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var defer = __nccwpck_require__(15295); -// --- Exported API +// API +module.exports = async; -module.exports = Reader; +/** + * Runs provided callback asynchronously + * even if callback itself is not + * + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback + */ +function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); + } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; +} /***/ }), -/***/ 42473: +/***/ 15295: /***/ ((module) => { -// Copyright 2011 Mark Cavage All rights reserved. +module.exports = defer; +/** + * Runs provided function on next iteration of the event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); -module.exports = { - EOC: 0, - Boolean: 1, - Integer: 2, - BitString: 3, - OctetString: 4, - Null: 5, - OID: 6, - ObjectDescriptor: 7, - External: 8, - Real: 9, // float - Enumeration: 10, - PDV: 11, - Utf8String: 12, - RelativeOID: 13, - Sequence: 16, - Set: 17, - NumericString: 18, - PrintableString: 19, - T61String: 20, - VideotexString: 21, - IA5String: 22, - UTCTime: 23, - GeneralizedTime: 24, - GraphicString: 25, - VisibleString: 26, - GeneralString: 28, - UniversalString: 29, - CharacterString: 30, - BMPString: 31, - Constructor: 32, - Context: 128 -}; + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} /***/ }), -/***/ 43200: +/***/ 9023: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// Copyright 2011 Mark Cavage All rights reserved. 
+var async = __nccwpck_require__(72794) + , abort = __nccwpck_require__(1700) + ; -var assert = __nccwpck_require__(42357); -var Buffer = __nccwpck_require__(15118).Buffer; -var ASN1 = __nccwpck_require__(42473); -var errors = __nccwpck_require__(99348); +// API +module.exports = iterate; +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; -// --- Globals + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } -var newInvalidAsn1Error = errors.newInvalidAsn1Error; + // clean up jobs + delete state.jobs[key]; -var DEFAULT_OPTS = { - size: 1024, - growthFactor: 8 -}; + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + // return salvaged results + callback(error, state.results); + }); +} -// --- Helpers +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; -function merge(from, to) { - assert.ok(from); - assert.equal(typeof (from), 'object'); - assert.ok(to); - assert.equal(typeof (to), 'object'); + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } - var keys = Object.getOwnPropertyNames(from); - keys.forEach(function (key) { - if (to[key]) - return; + return aborter; +} - var value = Object.getOwnPropertyDescriptor(from, key); - Object.defineProperty(to, key, value); - }); - return to; +/***/ }), + +/***/ 42474: +/***/ ((module) => { + +// API +module.exports = state; + +/** + * Creates initial state object + * for iteration over list + * + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object + */ +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } + + return initState; } +/***/ }), -// --- API +/***/ 37942: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function Writer(options) { - options = merge(DEFAULT_OPTS, options || {}); +var abort = __nccwpck_require__(1700) + , async = __nccwpck_require__(72794) + ; - this._buf = Buffer.alloc(options.size || 1024); - this._size = this._buf.length; - this._offset = 0; - this._options = options; +// API +module.exports = terminator; - // A list of offsets in the buffer where we need to insert - // sequence tag/len pairs. - this._seq = []; +/** + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination + */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } + + // fast forward iteration index + this.index = this.size; + + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); } -Object.defineProperty(Writer.prototype, 'buffer', { - get: function () { - if (this._seq.length) - throw newInvalidAsn1Error(this._seq.length + ' unended sequence(s)'); - return (this._buf.slice(0, this._offset)); - } -}); +/***/ }), -Writer.prototype.writeByte = function (b) { - if (typeof (b) !== 'number') - throw new TypeError('argument must be a Number'); +/***/ 8210: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this._ensure(1); - this._buf[this._offset++] = b; -}; +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(42474) + , terminator = __nccwpck_require__(37942) + ; +// Public API +module.exports = parallel; -Writer.prototype.writeInt = function (i, tag) { - if (typeof (i) !== 'number') - throw new TypeError('argument must be a Number'); - if (typeof (tag) !== 'number') - tag = ASN1.Integer; +/** + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); - var sz = 4; + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } - while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) && - (sz > 1)) { - sz--; - i <<= 8; + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; } - if (sz > 4) - throw newInvalidAsn1Error('BER ints cannot be > 0xffffffff'); + return terminator.bind(state, callback); +} - this._ensure(2 + sz); - this._buf[this._offset++] = tag; - this._buf[this._offset++] = sz; - while (sz-- > 0) { - this._buf[this._offset++] = ((i & 0xff000000) >>> 24); - i <<= 8; - } +/***/ }), -}; +/***/ 50445: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var serialOrdered = __nccwpck_require__(3578); -Writer.prototype.writeNull = function () { - this.writeByte(ASN1.Null); - this.writeByte(0x00); -}; +// Public API +module.exports = serial; +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param 
{function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} -Writer.prototype.writeEnumeration = function (i, tag) { - if (typeof (i) !== 'number') - throw new TypeError('argument must be a Number'); - if (typeof (tag) !== 'number') - tag = ASN1.Enumeration; - return this.writeInt(i, tag); -}; +/***/ }), +/***/ 3578: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Writer.prototype.writeBoolean = function (b, tag) { - if (typeof (b) !== 'boolean') - throw new TypeError('argument must be a Boolean'); - if (typeof (tag) !== 'number') - tag = ASN1.Boolean; +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(42474) + , terminator = __nccwpck_require__(37942) + ; - this._ensure(3); - this._buf[this._offset++] = tag; - this._buf[this._offset++] = 0x01; - this._buf[this._offset++] = b ? 0xff : 0x00; -}; +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); -Writer.prototype.writeString = function (s, tag) { - if (typeof (s) !== 'string') - throw new TypeError('argument must be a string (was: ' + typeof (s) + ')'); - if (typeof (tag) !== 'number') - tag = ASN1.OctetString; + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } - var len = Buffer.byteLength(s); - this.writeByte(tag); - this.writeLength(len); - if (len) { - this._ensure(len); - this._buf.write(s, this._offset); - this._offset += len; - } -}; + state.index++; + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } -Writer.prototype.writeBuffer = function (buf, tag) { - if (typeof (tag) !== 'number') - throw new TypeError('tag must be a number'); - if (!Buffer.isBuffer(buf)) - throw new TypeError('argument must be a buffer'); + // done here + callback(null, state.results); + }); - this.writeByte(tag); - this.writeLength(buf.length); - this._ensure(buf.length); - buf.copy(this._buf, this._offset, 0, buf.length); - this._offset += buf.length; -}; + return terminator.bind(state, callback); +} +/* + * -- Sort methods + */ -Writer.prototype.writeStringArray = function (strings) { - if ((!strings instanceof Array)) - throw new TypeError('argument must be an Array[String]'); +/** + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 
1 : 0; +} - var self = this; - strings.forEach(function (s) { - self.writeString(s); - }); -}; +/** + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function descending(a, b) +{ + return -1 * ascending(a, b); +} -// This is really to solve DER cases, but whatever for now -Writer.prototype.writeOID = function (s, tag) { - if (typeof (s) !== 'string') - throw new TypeError('argument must be a string'); - if (typeof (tag) !== 'number') - tag = ASN1.OID; - if (!/^([0-9]+\.){3,}[0-9]+$/.test(s)) - throw new Error('argument is not a valid OID string'); +/***/ }), - function encodeOctet(bytes, octet) { - if (octet < 128) { - bytes.push(octet); - } else if (octet < 16384) { - bytes.push((octet >>> 7) | 0x80); - bytes.push(octet & 0x7F); - } else if (octet < 2097152) { - bytes.push((octet >>> 14) | 0x80); - bytes.push(((octet >>> 7) | 0x80) & 0xFF); - bytes.push(octet & 0x7F); - } else if (octet < 268435456) { - bytes.push((octet >>> 21) | 0x80); - bytes.push(((octet >>> 14) | 0x80) & 0xFF); - bytes.push(((octet >>> 7) | 0x80) & 0xFF); - bytes.push(octet & 0x7F); - } else { - bytes.push(((octet >>> 28) | 0x80) & 0xFF); - bytes.push(((octet >>> 21) | 0x80) & 0xFF); - bytes.push(((octet >>> 14) | 0x80) & 0xFF); - bytes.push(((octet >>> 7) | 0x80) & 0xFF); - bytes.push(octet & 0x7F); - } - } +/***/ 96342: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var tmp = s.split('.'); - var bytes = []; - bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10)); - tmp.slice(2).forEach(function (b) { - encodeOctet(bytes, parseInt(b, 10)); - }); - var self = this; - this._ensure(2 + bytes.length); - this.writeByte(tag); - this.writeLength(bytes.length); - bytes.forEach(function (b) { - self.writeByte(b); - }); -}; +/*! + * Copyright 2010 LearnBoost + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Module dependencies. + */ -Writer.prototype.writeLength = function (len) { - if (typeof (len) !== 'number') - throw new TypeError('argument must be a Number'); +var crypto = __nccwpck_require__(76417) + , parse = __nccwpck_require__(78835).parse + ; - this._ensure(4); +/** + * Valid keys. 
+ */ - if (len <= 0x7f) { - this._buf[this._offset++] = len; - } else if (len <= 0xff) { - this._buf[this._offset++] = 0x81; - this._buf[this._offset++] = len; - } else if (len <= 0xffff) { - this._buf[this._offset++] = 0x82; - this._buf[this._offset++] = len >> 8; - this._buf[this._offset++] = len; - } else if (len <= 0xffffff) { - this._buf[this._offset++] = 0x83; - this._buf[this._offset++] = len >> 16; - this._buf[this._offset++] = len >> 8; - this._buf[this._offset++] = len; - } else { - throw newInvalidAsn1Error('Length too long (> 4 bytes)'); - } -}; +var keys = + [ 'acl' + , 'location' + , 'logging' + , 'notification' + , 'partNumber' + , 'policy' + , 'requestPayment' + , 'torrent' + , 'uploadId' + , 'uploads' + , 'versionId' + , 'versioning' + , 'versions' + , 'website' + ] -Writer.prototype.startSequence = function (tag) { - if (typeof (tag) !== 'number') - tag = ASN1.Sequence | ASN1.Constructor; +/** + * Return an "Authorization" header value with the given `options` + * in the form of "AWS :" + * + * @param {Object} options + * @return {String} + * @api private + */ - this.writeByte(tag); - this._seq.push(this._offset); - this._ensure(3); - this._offset += 3; -}; +function authorization (options) { + return 'AWS ' + options.key + ':' + sign(options) +} +module.exports = authorization +module.exports.authorization = authorization -Writer.prototype.endSequence = function () { - var seq = this._seq.pop(); - var start = seq + 3; - var len = this._offset - start; +/** + * Simple HMAC-SHA1 Wrapper + * + * @param {Object} options + * @return {String} + * @api private + */ - if (len <= 0x7f) { - this._shift(start, len, -2); - this._buf[seq] = len; - } else if (len <= 0xff) { - this._shift(start, len, -1); - this._buf[seq] = 0x81; - this._buf[seq + 1] = len; - } else if (len <= 0xffff) { - this._buf[seq] = 0x82; - this._buf[seq + 1] = len >> 8; - this._buf[seq + 2] = len; - } else if (len <= 0xffffff) { - this._shift(start, len, 1); - this._buf[seq] = 0x83; - this._buf[seq + 1] = len >> 16; - this._buf[seq + 2] = len >> 8; - this._buf[seq + 3] = len; - } else { - throw newInvalidAsn1Error('Sequence too long'); - } -}; +function hmacSha1 (options) { + return crypto.createHmac('sha1', options.secret).update(options.message).digest('base64') +} +module.exports.hmacSha1 = hmacSha1 -Writer.prototype._shift = function (start, len, shift) { - assert.ok(start !== undefined); - assert.ok(len !== undefined); - assert.ok(shift); +/** + * Create a base64 sha1 HMAC for `options`. + * + * @param {Object} options + * @return {String} + * @api private + */ - this._buf.copy(this._buf, start + shift, start, start + len); - this._offset += shift; -}; +function sign (options) { + options.message = stringToSign(options) + return hmacSha1(options) +} +module.exports.sign = sign -Writer.prototype._ensure = function (len) { - assert.ok(len); +/** + * Create a base64 sha1 HMAC for `options`. + * + * Specifically to be used with S3 presigned URLs + * + * @param {Object} options + * @return {String} + * @api private + */ - if (this._size - this._offset < len) { - var sz = this._size * this._options.growthFactor; - if (sz - this._offset < len) - sz += len; +function signQuery (options) { + options.message = queryStringToSign(options) + return hmacSha1(options) +} +module.exports.signQuery= signQuery - var buf = Buffer.alloc(sz); +/** + * Return a string for sign() with the given `options`. 
+ * + * Spec: + * + * \n + * \n + * \n + * \n + * [headers\n] + * + * + * @param {Object} options + * @return {String} + * @api private + */ - this._buf.copy(buf, 0, 0, this._offset); - this._buf = buf; - this._size = sz; +function stringToSign (options) { + var headers = options.amazonHeaders || '' + if (headers) headers += '\n' + var r = + [ options.verb + , options.md5 + , options.contentType + , options.date ? options.date.toUTCString() : '' + , headers + options.resource + ] + return r.join('\n') +} +module.exports.stringToSign = stringToSign + +/** + * Return a string for sign() with the given `options`, but is meant exclusively + * for S3 presigned URLs + * + * Spec: + * + * \n + * + * + * @param {Object} options + * @return {String} + * @api private + */ + +function queryStringToSign (options){ + return 'GET\n\n\n' + options.date + '\n' + options.resource +} +module.exports.queryStringToSign = queryStringToSign + +/** + * Perform the following: + * + * - ignore non-amazon headers + * - lowercase fields + * - sort lexicographically + * - trim whitespace between ":" + * - join with newline + * + * @param {Object} headers + * @return {String} + * @api private + */ + +function canonicalizeHeaders (headers) { + var buf = [] + , fields = Object.keys(headers) + ; + for (var i = 0, len = fields.length; i < len; ++i) { + var field = fields[i] + , val = headers[field] + , field = field.toLowerCase() + ; + if (0 !== field.indexOf('x-amz')) continue + buf.push(field + ':' + val) } -}; + return buf.sort().join('\n') +} +module.exports.canonicalizeHeaders = canonicalizeHeaders +/** + * Perform the following: + * + * - ignore non sub-resources + * - sort lexicographically + * + * @param {String} resource + * @return {String} + * @api private + */ +function canonicalizeResource (resource) { + var url = parse(resource, true) + , path = url.pathname + , buf = [] + ; -// --- Exported API + Object.keys(url.query).forEach(function(key){ + if (!~keys.indexOf(key)) return + var val = '' == url.query[key] ? '' : '=' + encodeURIComponent(url.query[key]) + buf.push(key + val) + }) -module.exports = Writer; + return path + (buf.length ? '?' + buf.sort().join('&') : '') +} +module.exports.canonicalizeResource = canonicalizeResource /***/ }), -/***/ 80970: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Copyright 2011 Mark Cavage All rights reserved. 
+/***/ 16071: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// If you have no idea what ASN.1 or BER is, see this: -// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc +var aws4 = exports, + url = __nccwpck_require__(78835), + querystring = __nccwpck_require__(71191), + crypto = __nccwpck_require__(76417), + lru = __nccwpck_require__(74225), + credentialsCache = lru(1000) -var Ber = __nccwpck_require__(194); +// http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html +function hmac(key, string, encoding) { + return crypto.createHmac('sha256', key).update(string, 'utf8').digest(encoding) +} +function hash(string, encoding) { + return crypto.createHash('sha256').update(string, 'utf8').digest(encoding) +} -// --- Exported API +// This function assumes the string has already been percent encoded +function encodeRfc3986(urlEncodedString) { + return urlEncodedString.replace(/[!'()*]/g, function(c) { + return '%' + c.charCodeAt(0).toString(16).toUpperCase() + }) +} -module.exports = { +function encodeRfc3986Full(str) { + return encodeRfc3986(encodeURIComponent(str)) +} - Ber: Ber, +// A bit of a combination of: +// https://github.com/aws/aws-sdk-java-v2/blob/dc695de6ab49ad03934e1b02e7263abbd2354be0/core/auth/src/main/java/software/amazon/awssdk/auth/signer/internal/AbstractAws4Signer.java#L59 +// https://github.com/aws/aws-sdk-js/blob/18cb7e5b463b46239f9fdd4a65e2ff8c81831e8f/lib/signers/v4.js#L191-L199 +// https://github.com/mhart/aws4fetch/blob/b3aed16b6f17384cf36ea33bcba3c1e9f3bdfefd/src/main.js#L25-L34 +var HEADERS_TO_IGNORE = { + 'authorization': true, + 'connection': true, + 'x-amzn-trace-id': true, + 'user-agent': true, + 'expect': true, + 'presigned-expires': true, + 'range': true, +} - BerReader: Ber.Reader, +// request: { path | body, [host], [method], [headers], [service], [region] } +// credentials: { accessKeyId, secretAccessKey, [sessionToken] } +function RequestSigner(request, credentials) { - BerWriter: Ber.Writer + if (typeof request === 'string') request = url.parse(request) -}; + var headers = request.headers = (request.headers || {}), + hostParts = (!this.service || !this.region) && this.matchHost(request.hostname || request.host || headers.Host || headers.host) + this.request = request + this.credentials = credentials || this.defaultCredentials() -/***/ }), + this.service = request.service || hostParts[0] || '' + this.region = request.region || hostParts[1] || 'us-east-1' -/***/ 66631: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // SES uses a different domain from the service name + if (this.service === 'email') this.service = 'ses' -// Copyright (c) 2012, Mark Cavage. All rights reserved. -// Copyright 2015 Joyent, Inc. 
+ if (!request.method && request.body) + request.method = 'POST' -var assert = __nccwpck_require__(42357); -var Stream = __nccwpck_require__(92413).Stream; -var util = __nccwpck_require__(31669); + if (!headers.Host && !headers.host) { + headers.Host = request.hostname || request.host || this.createHost() + // If a port is specified explicitly, use it as is + if (request.port) + headers.Host += ':' + request.port + } + if (!request.hostname && !request.host) + request.hostname = headers.Host || headers.host -///--- Globals + this.isCodeCommitGit = this.service === 'codecommit' && request.method === 'GIT' +} -/* JSSTYLED */ -var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; +RequestSigner.prototype.matchHost = function(host) { + var match = (host || '').match(/([^\.]+)\.(?:([^\.]*)\.)?amazonaws\.com(\.cn)?$/) + var hostParts = (match || []).slice(1, 3) + // ES's hostParts are sometimes the other way round, if the value that is expected + // to be region equals ‘es’ switch them back + // e.g. search-cluster-name-aaaa00aaaa0aaa0aaaaaaa0aaa.us-east-1.es.amazonaws.com + if (hostParts[1] === 'es') + hostParts = hostParts.reverse() -///--- Internal + if (hostParts[1] == 's3') { + hostParts[0] = 's3' + hostParts[1] = 'us-east-1' + } else { + for (var i = 0; i < 2; i++) { + if (/^s3-/.test(hostParts[i])) { + hostParts[1] = hostParts[i].slice(3) + hostParts[0] = 's3' + break + } + } + } -function _capitalize(str) { - return (str.charAt(0).toUpperCase() + str.slice(1)); + return hostParts } -function _toss(name, expected, oper, arg, actual) { - throw new assert.AssertionError({ - message: util.format('%s (%s) is required', name, expected), - actual: (actual === undefined) ? typeof (arg) : actual(arg), - expected: expected, - operator: oper || '===', - stackStartFunction: _toss.caller - }); -} +// http://docs.aws.amazon.com/general/latest/gr/rande.html +RequestSigner.prototype.isSingleRegion = function() { + // Special case for S3 and SimpleDB in us-east-1 + if (['s3', 'sdb'].indexOf(this.service) >= 0 && this.region === 'us-east-1') return true -function _getClass(arg) { - return (Object.prototype.toString.call(arg).slice(8, -1)); + return ['cloudfront', 'ls', 'route53', 'iam', 'importexport', 'sts'] + .indexOf(this.service) >= 0 } -function noop() { - // Why even bother with asserts? +RequestSigner.prototype.createHost = function() { + var region = this.isSingleRegion() ? '' : '.' + this.region, + subdomain = this.service === 'ses' ? 
'email' : this.service + return subdomain + region + '.amazonaws.com' } +RequestSigner.prototype.prepareRequest = function() { + this.parsePath() -///--- Exports + var request = this.request, headers = request.headers, query -var types = { - bool: { - check: function (arg) { return typeof (arg) === 'boolean'; } - }, - func: { - check: function (arg) { return typeof (arg) === 'function'; } - }, - string: { - check: function (arg) { return typeof (arg) === 'string'; } - }, - object: { - check: function (arg) { - return typeof (arg) === 'object' && arg !== null; - } - }, - number: { - check: function (arg) { - return typeof (arg) === 'number' && !isNaN(arg); - } - }, - finite: { - check: function (arg) { - return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); - } - }, - buffer: { - check: function (arg) { return Buffer.isBuffer(arg); }, - operator: 'Buffer.isBuffer' - }, - array: { - check: function (arg) { return Array.isArray(arg); }, - operator: 'Array.isArray' - }, - stream: { - check: function (arg) { return arg instanceof Stream; }, - operator: 'instanceof', - actual: _getClass - }, - date: { - check: function (arg) { return arg instanceof Date; }, - operator: 'instanceof', - actual: _getClass - }, - regexp: { - check: function (arg) { return arg instanceof RegExp; }, - operator: 'instanceof', - actual: _getClass - }, - uuid: { - check: function (arg) { - return typeof (arg) === 'string' && UUID_REGEXP.test(arg); - }, - operator: 'isUUID' - } -}; + if (request.signQuery) { -function _setExports(ndebug) { - var keys = Object.keys(types); - var out; + this.parsedPath.query = query = this.parsedPath.query || {} - /* re-export standard assert */ - if (process.env.NODE_NDEBUG) { - out = noop; - } else { - out = function (arg, msg) { - if (!arg) { - _toss(msg, 'true', arg); - } - }; + if (this.credentials.sessionToken) + query['X-Amz-Security-Token'] = this.credentials.sessionToken + + if (this.service === 's3' && !query['X-Amz-Expires']) + query['X-Amz-Expires'] = 86400 + + if (query['X-Amz-Date']) + this.datetime = query['X-Amz-Date'] + else + query['X-Amz-Date'] = this.getDateTime() + + query['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256' + query['X-Amz-Credential'] = this.credentials.accessKeyId + '/' + this.credentialString() + query['X-Amz-SignedHeaders'] = this.signedHeaders() + + } else { + + if (!request.doNotModifyHeaders && !this.isCodeCommitGit) { + if (request.body && !headers['Content-Type'] && !headers['content-type']) + headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8' + + if (request.body && !headers['Content-Length'] && !headers['content-length']) + headers['Content-Length'] = Buffer.byteLength(request.body) + + if (this.credentials.sessionToken && !headers['X-Amz-Security-Token'] && !headers['x-amz-security-token']) + headers['X-Amz-Security-Token'] = this.credentials.sessionToken + + if (this.service === 's3' && !headers['X-Amz-Content-Sha256'] && !headers['x-amz-content-sha256']) + headers['X-Amz-Content-Sha256'] = hash(this.request.body || '', 'hex') + + if (headers['X-Amz-Date'] || headers['x-amz-date']) + this.datetime = headers['X-Amz-Date'] || headers['x-amz-date'] + else + headers['X-Amz-Date'] = this.getDateTime() } - /* standard checks */ - keys.forEach(function (k) { - if (ndebug) { - out[k] = noop; - return; - } - var type = types[k]; - out[k] = function (arg, msg) { - if (!type.check(arg)) { - _toss(msg, k, type.operator, arg, type.actual); - } - }; - }); + delete headers.Authorization + delete headers.authorization + } +} - /* 
optional checks */ - keys.forEach(function (k) { - var name = 'optional' + _capitalize(k); - if (ndebug) { - out[name] = noop; - return; - } - var type = types[k]; - out[name] = function (arg, msg) { - if (arg === undefined || arg === null) { - return; - } - if (!type.check(arg)) { - _toss(msg, k, type.operator, arg, type.actual); - } - }; - }); +RequestSigner.prototype.sign = function() { + if (!this.parsedPath) this.prepareRequest() - /* arrayOf checks */ - keys.forEach(function (k) { - var name = 'arrayOf' + _capitalize(k); - if (ndebug) { - out[name] = noop; - return; - } - var type = types[k]; - var expected = '[' + k + ']'; - out[name] = function (arg, msg) { - if (!Array.isArray(arg)) { - _toss(msg, expected, type.operator, arg, type.actual); - } - var i; - for (i = 0; i < arg.length; i++) { - if (!type.check(arg[i])) { - _toss(msg, expected, type.operator, arg, type.actual); - } - } - }; - }); + if (this.request.signQuery) { + this.parsedPath.query['X-Amz-Signature'] = this.signature() + } else { + this.request.headers.Authorization = this.authHeader() + } - /* optionalArrayOf checks */ - keys.forEach(function (k) { - var name = 'optionalArrayOf' + _capitalize(k); - if (ndebug) { - out[name] = noop; - return; - } - var type = types[k]; - var expected = '[' + k + ']'; - out[name] = function (arg, msg) { - if (arg === undefined || arg === null) { - return; - } - if (!Array.isArray(arg)) { - _toss(msg, expected, type.operator, arg, type.actual); - } - var i; - for (i = 0; i < arg.length; i++) { - if (!type.check(arg[i])) { - _toss(msg, expected, type.operator, arg, type.actual); - } - } - }; - }); + this.request.path = this.formatPath() - /* re-export built-in assertions */ - Object.keys(assert).forEach(function (k) { - if (k === 'AssertionError') { - out[k] = assert[k]; - return; - } - if (ndebug) { - out[k] = noop; - return; - } - out[k] = assert[k]; - }); + return this.request +} - /* export ourselves (for unit tests _only_) */ - out._setExports = _setExports; +RequestSigner.prototype.getDateTime = function() { + if (!this.datetime) { + var headers = this.request.headers, + date = new Date(headers.Date || headers.date || new Date) - return out; + this.datetime = date.toISOString().replace(/[:\-]|\.\d{3}/g, '') + + // Remove the trailing 'Z' on the timestamp string for CodeCommit git access + if (this.isCodeCommitGit) this.datetime = this.datetime.slice(0, -1) + } + return this.datetime } -module.exports = _setExports(process.env.NODE_NDEBUG); +RequestSigner.prototype.getDate = function() { + return this.getDateTime().substr(0, 8) +} +RequestSigner.prototype.authHeader = function() { + return [ + 'AWS4-HMAC-SHA256 Credential=' + this.credentials.accessKeyId + '/' + this.credentialString(), + 'SignedHeaders=' + this.signedHeaders(), + 'Signature=' + this.signature(), + ].join(', ') +} -/***/ }), +RequestSigner.prototype.signature = function() { + var date = this.getDate(), + cacheKey = [this.credentials.secretAccessKey, date, this.region, this.service].join(), + kDate, kRegion, kService, kCredentials = credentialsCache.get(cacheKey) + if (!kCredentials) { + kDate = hmac('AWS4' + this.credentials.secretAccessKey, date) + kRegion = hmac(kDate, this.region) + kService = hmac(kRegion, this.service) + kCredentials = hmac(kService, 'aws4_request') + credentialsCache.set(cacheKey, kCredentials) + } + return hmac(kCredentials, this.stringToSign(), 'hex') +} -/***/ 57888: -/***/ (function(__unused_webpack_module, exports) { +RequestSigner.prototype.stringToSign = function() { + return [ + 
'AWS4-HMAC-SHA256', + this.getDateTime(), + this.credentialString(), + hash(this.canonicalString(), 'hex'), + ].join('\n') +} -(function (global, factory) { - true ? factory(exports) : - 0; -}(this, (function (exports) { 'use strict'; +RequestSigner.prototype.canonicalString = function() { + if (!this.parsedPath) this.prepareRequest() - /** - * Creates a continuation function with some arguments already applied. - * - * Useful as a shorthand when combined with other control flow functions. Any - * arguments passed to the returned function are added to the arguments - * originally passed to apply. - * - * @name apply - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {Function} fn - The function you want to eventually apply all - * arguments to. Invokes with (arguments...). - * @param {...*} arguments... - Any number of arguments to automatically apply - * when the continuation is called. - * @returns {Function} the partially-applied function - * @example - * - * // using apply - * async.parallel([ - * async.apply(fs.writeFile, 'testfile1', 'test1'), - * async.apply(fs.writeFile, 'testfile2', 'test2') - * ]); - * - * - * // the same process without using apply - * async.parallel([ - * function(callback) { - * fs.writeFile('testfile1', 'test1', callback); - * }, - * function(callback) { - * fs.writeFile('testfile2', 'test2', callback); - * } - * ]); - * - * // It's possible to pass any number of additional arguments when calling the - * // continuation: - * - * node> var fn = async.apply(sys.puts, 'one'); - * node> fn('two', 'three'); - * one - * two - * three - */ - function apply(fn, ...args) { - return (...callArgs) => fn(...args,...callArgs); - } + var pathStr = this.parsedPath.path, + query = this.parsedPath.query, + headers = this.request.headers, + queryStr = '', + normalizePath = this.service !== 's3', + decodePath = this.service === 's3' || this.request.doNotEncodePath, + decodeSlashesInPath = this.service === 's3', + firstValOnly = this.service === 's3', + bodyHash - function initialParams (fn) { - return function (...args/*, callback*/) { - var callback = args.pop(); - return fn.call(this, args, callback); - }; - } + if (this.service === 's3' && this.request.signQuery) { + bodyHash = 'UNSIGNED-PAYLOAD' + } else if (this.isCodeCommitGit) { + bodyHash = '' + } else { + bodyHash = headers['X-Amz-Content-Sha256'] || headers['x-amz-content-sha256'] || + hash(this.request.body || '', 'hex') + } - /* istanbul ignore file */ + if (query) { + var reducedQuery = Object.keys(query).reduce(function(obj, key) { + if (!key) return obj + obj[encodeRfc3986Full(key)] = !Array.isArray(query[key]) ? query[key] : + (firstValOnly ? 
query[key][0] : query[key]) + return obj + }, {}) + var encodedQueryPieces = [] + Object.keys(reducedQuery).sort().forEach(function(key) { + if (!Array.isArray(reducedQuery[key])) { + encodedQueryPieces.push(key + '=' + encodeRfc3986Full(reducedQuery[key])) + } else { + reducedQuery[key].map(encodeRfc3986Full).sort() + .forEach(function(val) { encodedQueryPieces.push(key + '=' + val) }) + } + }) + queryStr = encodedQueryPieces.join('&') + } + if (pathStr !== '/') { + if (normalizePath) pathStr = pathStr.replace(/\/{2,}/g, '/') + pathStr = pathStr.split('/').reduce(function(path, piece) { + if (normalizePath && piece === '..') { + path.pop() + } else if (!normalizePath || piece !== '.') { + if (decodePath) piece = decodeURIComponent(piece.replace(/\+/g, ' ')) + path.push(encodeRfc3986Full(piece)) + } + return path + }, []).join('/') + if (pathStr[0] !== '/') pathStr = '/' + pathStr + if (decodeSlashesInPath) pathStr = pathStr.replace(/%2F/g, '/') + } - var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; - var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + return [ + this.request.method || 'GET', + pathStr, + queryStr, + this.canonicalHeaders() + '\n', + this.signedHeaders(), + bodyHash, + ].join('\n') +} - function fallback(fn) { - setTimeout(fn, 0); - } +RequestSigner.prototype.canonicalHeaders = function() { + var headers = this.request.headers + function trimAll(header) { + return header.toString().trim().replace(/\s+/g, ' ') + } + return Object.keys(headers) + .filter(function(key) { return HEADERS_TO_IGNORE[key.toLowerCase()] == null }) + .sort(function(a, b) { return a.toLowerCase() < b.toLowerCase() ? -1 : 1 }) + .map(function(key) { return key.toLowerCase() + ':' + trimAll(headers[key]) }) + .join('\n') +} - function wrap(defer) { - return (fn, ...args) => defer(() => fn(...args)); - } +RequestSigner.prototype.signedHeaders = function() { + return Object.keys(this.request.headers) + .map(function(key) { return key.toLowerCase() }) + .filter(function(key) { return HEADERS_TO_IGNORE[key] == null }) + .sort() + .join(';') +} - var _defer; +RequestSigner.prototype.credentialString = function() { + return [ + this.getDate(), + this.region, + this.service, + 'aws4_request', + ].join('/') +} - if (hasSetImmediate) { - _defer = setImmediate; - } else if (hasNextTick) { - _defer = process.nextTick; - } else { - _defer = fallback; - } +RequestSigner.prototype.defaultCredentials = function() { + var env = process.env + return { + accessKeyId: env.AWS_ACCESS_KEY_ID || env.AWS_ACCESS_KEY, + secretAccessKey: env.AWS_SECRET_ACCESS_KEY || env.AWS_SECRET_KEY, + sessionToken: env.AWS_SESSION_TOKEN, + } +} - var setImmediate$1 = wrap(_defer); +RequestSigner.prototype.parsePath = function() { + var path = this.request.path || '/' - /** - * Take a sync function and make it async, passing its return value to a - * callback. This is useful for plugging sync functions into a waterfall, - * series, or other async functions. Any arguments passed to the generated - * function will be passed to the wrapped function (except for the final - * callback argument). Errors thrown will be passed to the callback. - * - * If the function passed to `asyncify` returns a Promise, that promises's - * resolved/rejected state will be used to call the callback, rather than simply - * the synchronous return value. - * - * This also means you can asyncify ES2017 `async` functions. 
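The signature(), stringToSign() and canonicalString() methods above implement the Signature Version 4 flow: hash the canonical request, assemble the string to sign, derive a signing key by chaining HMAC-SHA256 over the date, region, service and the literal 'aws4_request', then sign. Below is a small stand-alone sketch of that derivation using Node's crypto module, mirroring the hmac()/hash() helpers defined at the top of the aws4 module; every concrete value is made up for illustration and is not a real credential.

const crypto = require('crypto')

const hmac = (key, str, enc) => crypto.createHmac('sha256', key).update(str, 'utf8').digest(enc)
const hash = (str, enc) => crypto.createHash('sha256').update(str, 'utf8').digest(enc)

// Illustrative inputs only.
const secret = 'EXAMPLE-SECRET-KEY'
const date = '20210721'                          // getDate(): first 8 chars of X-Amz-Date
const region = 'us-east-1'
const service = 's3'
const credentialScope = [date, region, service, 'aws4_request'].join('/')   // credentialString()

const stringToSign = [
  'AWS4-HMAC-SHA256',
  '20210721T175059Z',                            // getDateTime()
  credentialScope,
  hash('<canonical request built as in canonicalString()>', 'hex'),
].join('\n')

// Key derivation chain; the bundle memoizes the final key per
// (secret, date, region, service) in the lru-backed credentialsCache.
const kDate = hmac('AWS4' + secret, date)
const kRegion = hmac(kDate, region)
const kService = hmac(kRegion, service)
const kSigning = hmac(kService, 'aws4_request')

console.log(hmac(kSigning, stringToSign, 'hex'))   // hex signature used in the Authorization header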
- * - * @name asyncify - * @static - * @memberOf module:Utils - * @method - * @alias wrapSync - * @category Util - * @param {Function} func - The synchronous function, or Promise-returning - * function to convert to an {@link AsyncFunction}. - * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be - * invoked with `(args..., callback)`. - * @example - * - * // passing a regular synchronous function - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(JSON.parse), - * function (data, next) { - * // data is the result of parsing the text. - * // If there was a parsing error, it would have been caught. - * } - * ], callback); - * - * // passing a function returning a promise - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(function (contents) { - * return db.model.create(contents); - * }), - * function (model, next) { - * // `model` is the instantiated model object. - * // If there was an error, this function would be skipped. - * } - * ], callback); - * - * // es2017 example, though `asyncify` is not needed if your JS environment - * // supports async functions out of the box - * var q = async.queue(async.asyncify(async function(file) { - * var intermediateStep = await processFile(file); - * return await somePromise(intermediateStep) - * })); - * - * q.push(files); - */ - function asyncify(func) { - if (isAsync(func)) { - return function (...args/*, callback*/) { - const callback = args.pop(); - const promise = func.apply(this, args); - return handlePromise(promise, callback) - } - } + // S3 doesn't always encode characters > 127 correctly and + // all services don't encode characters > 255 correctly + // So if there are non-reserved chars (and it's not already all % encoded), just encode them all + if (/[^0-9A-Za-z;,/?:@&=+$\-_.!~*'()#%]/.test(path)) { + path = encodeURI(decodeURI(path)) + } - return initialParams(function (args, callback) { - var result; - try { - result = func.apply(this, args); - } catch (e) { - return callback(e); - } - // if result is Promise object - if (result && typeof result.then === 'function') { - return handlePromise(result, callback) - } else { - callback(null, result); - } - }); - } + var queryIx = path.indexOf('?'), + query = null - function handlePromise(promise, callback) { - return promise.then(value => { - invokeCallback(callback, null, value); - }, err => { - invokeCallback(callback, err && err.message ? err : new Error(err)); - }); - } + if (queryIx >= 0) { + query = querystring.parse(path.slice(queryIx + 1)) + path = path.slice(0, queryIx) + } - function invokeCallback(callback, error, value) { - try { - callback(error, value); - } catch (err) { - setImmediate$1(e => { throw e }, err); - } - } + this.parsedPath = { + path: path, + query: query, + } +} - function isAsync(fn) { - return fn[Symbol.toStringTag] === 'AsyncFunction'; - } +RequestSigner.prototype.formatPath = function() { + var path = this.parsedPath.path, + query = this.parsedPath.query - function isAsyncGenerator(fn) { - return fn[Symbol.toStringTag] === 'AsyncGenerator'; - } + if (!query) return path - function isAsyncIterable(obj) { - return typeof obj[Symbol.asyncIterator] === 'function'; - } + // Services don't support empty query string keys + if (query[''] != null) delete query[''] - function wrapAsync(asyncFn) { - if (typeof asyncFn !== 'function') throw new Error('expected a function') - return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; - } + return path + '?' 
+ encodeRfc3986(querystring.stringify(query)) +} - // conditionally promisify a function. - // only return a promise if a callback is omitted - function awaitify (asyncFn, arity = asyncFn.length) { - if (!arity) throw new Error('arity is undefined') - function awaitable (...args) { - if (typeof args[arity - 1] === 'function') { - return asyncFn.apply(this, args) - } +aws4.RequestSigner = RequestSigner - return new Promise((resolve, reject) => { - args[arity - 1] = (err, ...cbArgs) => { - if (err) return reject(err) - resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); - }; - asyncFn.apply(this, args); - }) - } +aws4.sign = function(request, credentials) { + return new RequestSigner(request, credentials).sign() +} - return awaitable - } - function applyEach (eachfn) { - return function applyEach(fns, ...callArgs) { - const go = awaitify(function (callback) { - var that = this; - return eachfn(fns, (fn, cb) => { - wrapAsync(fn).apply(that, callArgs.concat(cb)); - }, callback); - }); - return go; - }; - } +/***/ }), - function _asyncMap(eachfn, arr, iteratee, callback) { - arr = arr || []; - var results = []; - var counter = 0; - var _iteratee = wrapAsync(iteratee); +/***/ 74225: +/***/ ((module) => { - return eachfn(arr, (value, _, iterCb) => { - var index = counter++; - _iteratee(value, (err, v) => { - results[index] = v; - iterCb(err); - }); - }, err => { - callback(err, results); - }); - } +module.exports = function(size) { + return new LruCache(size) +} - function isArrayLike(value) { - return value && - typeof value.length === 'number' && - value.length >= 0 && - value.length % 1 === 0; - } +function LruCache(size) { + this.capacity = size | 0 + this.map = Object.create(null) + this.list = new DoublyLinkedList() +} - // A temporary value used to identify if the loop should be broken. - // See #1064, #1293 - const breakLoop = {}; +LruCache.prototype.get = function(key) { + var node = this.map[key] + if (node == null) return undefined + this.used(node) + return node.val +} - function once(fn) { - function wrapper (...args) { - if (fn === null) return; - var callFn = fn; - fn = null; - callFn.apply(this, args); - } - Object.assign(wrapper, fn); - return wrapper - } +LruCache.prototype.set = function(key, val) { + var node = this.map[key] + if (node != null) { + node.val = val + } else { + if (!this.capacity) this.prune() + if (!this.capacity) return false + node = new DoublyLinkedNode(key, val) + this.map[key] = node + this.capacity-- + } + this.used(node) + return true +} - function getIterator (coll) { - return coll[Symbol.iterator] && coll[Symbol.iterator](); - } +LruCache.prototype.used = function(node) { + this.list.moveToFront(node) +} - function createArrayIterator(coll) { - var i = -1; - var len = coll.length; - return function next() { - return ++i < len ? {value: coll[i], key: i} : null; - } - } +LruCache.prototype.prune = function() { + var node = this.list.pop() + if (node != null) { + delete this.map[node.key] + this.capacity++ + } +} - function createES2015Iterator(iterator) { - var i = -1; - return function next() { - var item = iterator.next(); - if (item.done) - return null; - i++; - return {value: item.value, key: i}; - } - } - function createObjectIterator(obj) { - var okeys = obj ? Object.keys(obj) : []; - var i = -1; - var len = okeys.length; - return function next() { - var key = okeys[++i]; - return i < len ? 
{value: obj[key], key} : null; - }; - } +function DoublyLinkedList() { + this.firstNode = null + this.lastNode = null +} - function createIterator(coll) { - if (isArrayLike(coll)) { - return createArrayIterator(coll); - } +DoublyLinkedList.prototype.moveToFront = function(node) { + if (this.firstNode == node) return - var iterator = getIterator(coll); - return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); - } + this.remove(node) - function onlyOnce(fn) { - return function (...args) { - if (fn === null) throw new Error("Callback was already called."); - var callFn = fn; - fn = null; - callFn.apply(this, args); - }; - } + if (this.firstNode == null) { + this.firstNode = node + this.lastNode = node + node.prev = null + node.next = null + } else { + node.prev = null + node.next = this.firstNode + node.next.prev = node + this.firstNode = node + } +} - // for async generators - function asyncEachOfLimit(generator, limit, iteratee, callback) { - let done = false; - let canceled = false; - let awaiting = false; - let running = 0; - let idx = 0; +DoublyLinkedList.prototype.pop = function() { + var lastNode = this.lastNode + if (lastNode != null) { + this.remove(lastNode) + } + return lastNode +} - function replenish() { - //console.log('replenish') - if (running >= limit || awaiting || done) return - //console.log('replenish awaiting') - awaiting = true; - generator.next().then(({value, done: iterDone}) => { - //console.log('got value', value) - if (canceled || done) return - awaiting = false; - if (iterDone) { - done = true; - if (running <= 0) { - //console.log('done nextCb') - callback(null); - } - return; - } - running++; - iteratee(value, idx, iterateeCallback); - idx++; - replenish(); - }).catch(handleError); - } +DoublyLinkedList.prototype.remove = function(node) { + if (this.firstNode == node) { + this.firstNode = node.next + } else if (node.prev != null) { + node.prev.next = node.next + } + if (this.lastNode == node) { + this.lastNode = node.prev + } else if (node.next != null) { + node.next.prev = node.prev + } +} - function iterateeCallback(err, result) { - //console.log('iterateeCallback') - running -= 1; - if (canceled) return - if (err) return handleError(err) - if (err === false) { - done = true; - canceled = true; - return - } +function DoublyLinkedNode(key, val) { + this.key = key + this.val = val + this.prev = null + this.next = null +} - if (result === breakLoop || (done && running <= 0)) { - done = true; - //console.log('done iterCb') - return callback(null); - } - replenish(); - } - function handleError(err) { - if (canceled) return - awaiting = false; - done = true; - callback(err); - } +/***/ }), - replenish(); - } +/***/ 96545: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var eachOfLimit = (limit) => { - return (obj, iteratee, callback) => { - callback = once(callback); - if (limit <= 0) { - throw new RangeError('concurrency limit cannot be less than 1') - } - if (!obj) { - return callback(null); - } - if (isAsyncGenerator(obj)) { - return asyncEachOfLimit(obj, limit, iteratee, callback) - } - if (isAsyncIterable(obj)) { - return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) - } - var nextElem = createIterator(obj); - var done = false; - var canceled = false; - var running = 0; - var looping = false; +module.exports = __nccwpck_require__(52618); - function iterateeCallback(err, value) { - if (canceled) return - running -= 1; - if (err) { - done = true; - callback(err); - } - else if (err === 
false) { - done = true; - canceled = true; - } - else if (value === breakLoop || (done && running <= 0)) { - done = true; - return callback(null); - } - else if (!looping) { - replenish(); - } - } +/***/ }), - function replenish () { - looping = true; - while (running < limit && !done) { - var elem = nextElem(); - if (elem === null) { - done = true; - if (running <= 0) { - callback(null); - } - return; - } - running += 1; - iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); - } - looping = false; - } +/***/ 68104: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - replenish(); - }; - }; +"use strict"; - /** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a - * time. - * - * @name eachOfLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. The `key` is the item's key, or index in the case of an - * array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - */ - function eachOfLimit$1(coll, limit, iteratee, callback) { - return eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); - } - var eachOfLimit$2 = awaitify(eachOfLimit$1, 4); +var utils = __nccwpck_require__(20328); +var settle = __nccwpck_require__(13211); +var buildFullPath = __nccwpck_require__(41934); +var buildURL = __nccwpck_require__(30646); +var http = __nccwpck_require__(98605); +var https = __nccwpck_require__(57211); +var httpFollow = __nccwpck_require__(67707).http; +var httpsFollow = __nccwpck_require__(67707).https; +var url = __nccwpck_require__(78835); +var zlib = __nccwpck_require__(78761); +var pkg = __nccwpck_require__(20696); +var createError = __nccwpck_require__(15226); +var enhanceError = __nccwpck_require__(21516); - // eachOf implementation optimized for array-likes - function eachOfArrayLike(coll, iteratee, callback) { - callback = once(callback); - var index = 0, - completed = 0, - {length} = coll, - canceled = false; - if (length === 0) { - callback(null); - } +var isHttps = /https:?/; - function iteratorCallback(err, value) { - if (err === false) { - canceled = true; - } - if (canceled === true) return - if (err) { - callback(err); - } else if ((++completed === length) || value === breakLoop) { - callback(null); - } - } +/** + * + * @param {http.ClientRequestArgs} options + * @param {AxiosProxyConfig} proxy + * @param {string} location + */ +function setProxy(options, proxy, location) { + options.hostname = proxy.host; + options.host = proxy.host; + options.port = proxy.port; + options.path = location; - for (; index < length; index++) { - iteratee(coll[index], index, onlyOnce(iteratorCallback)); - } + // Basic proxy authorization + if (proxy.auth) { + var base64 = Buffer.from(proxy.auth.username + ':' + proxy.auth.password, 'utf8').toString('base64'); + options.headers['Proxy-Authorization'] = 'Basic ' + base64; + } + + // If a proxy is used, any redirects must also pass through the proxy + options.beforeRedirect = function 
beforeRedirect(redirection) { + redirection.headers.host = redirection.host; + setProxy(redirection, proxy, redirection.href); + }; +} + +/*eslint consistent-return:0*/ +module.exports = function httpAdapter(config) { + return new Promise(function dispatchHttpRequest(resolvePromise, rejectPromise) { + var resolve = function resolve(value) { + resolvePromise(value); + }; + var reject = function reject(value) { + rejectPromise(value); + }; + var data = config.data; + var headers = config.headers; + + // Set User-Agent (required by some servers) + // Only set header if it hasn't been set in config + // See https://github.com/axios/axios/issues/69 + if (!headers['User-Agent'] && !headers['user-agent']) { + headers['User-Agent'] = 'axios/' + pkg.version; } - // a generic version of eachOf which can handle array, object, and iterator cases. - function eachOfGeneric (coll, iteratee, callback) { - return eachOfLimit$2(coll, Infinity, iteratee, callback); + if (data && !utils.isStream(data)) { + if (Buffer.isBuffer(data)) { + // Nothing to do... + } else if (utils.isArrayBuffer(data)) { + data = Buffer.from(new Uint8Array(data)); + } else if (utils.isString(data)) { + data = Buffer.from(data, 'utf-8'); + } else { + return reject(createError( + 'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream', + config + )); + } + + // Add Content-Length header if data exists + headers['Content-Length'] = data.length; } - /** - * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument - * to the iteratee. - * - * @name eachOf - * @static - * @memberOf module:Collections - * @method - * @alias forEachOf - * @category Collection - * @see [async.each]{@link module:Collections.each} - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each - * item in `coll`. - * The `key` is the item's key, or index in the case of an array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - * @example - * - * var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; - * var configs = {}; - * - * async.forEachOf(obj, function (value, key, callback) { - * fs.readFile(__dirname + value, "utf8", function (err, data) { - * if (err) return callback(err); - * try { - * configs[key] = JSON.parse(data); - * } catch (e) { - * return callback(e); - * } - * callback(); - * }); - * }, function (err) { - * if (err) console.error(err.message); - * // configs is now a map of JSON data - * doSomethingWith(configs); - * }); - */ - function eachOf(coll, iteratee, callback) { - var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; - return eachOfImplementation(coll, wrapAsync(iteratee), callback); + // HTTP basic authentication + var auth = undefined; + if (config.auth) { + var username = config.auth.username || ''; + var password = config.auth.password || ''; + auth = username + ':' + password; } - var eachOf$1 = awaitify(eachOf, 3); + // Parse url + var fullPath = buildFullPath(config.baseURL, config.url); + var parsed = url.parse(fullPath); + var protocol = parsed.protocol || 'http:'; - /** - * Produces a new collection of values by mapping each value in `coll` through - * the `iteratee` function. 
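The small lru module registered a little earlier in this bundle (webpack id 74225) is what backs the signer's credentialsCache: a fixed-capacity map with a doubly linked list tracking recency, whose set() evicts the least recently used entry once capacity is exhausted. A brief usage sketch of that factory follows, assuming it is required directly; the require path is hypothetical, since inside the bundle it is resolved through __nccwpck_require__.

const lru = require('./lru')   // hypothetical path to the factory shown above

const cache = lru(2)           // capacity of two entries
cache.set('a', 1)
cache.set('b', 2)
cache.get('a')                 // touching 'a' makes 'b' the least recently used entry
cache.set('c', 3)              // at capacity, so prune() evicts 'b'

console.log(cache.get('a'))    // 1
console.log(cache.get('b'))    // undefined (evicted)
console.log(cache.get('c'))    // 3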
The `iteratee` is called with an item from `coll` - * and a callback for when it has finished processing. Each of these callback - * takes 2 arguments: an `error`, and the transformed item from `coll`. If - * `iteratee` passes an error to its callback, the main `callback` (for the - * `map` function) is immediately called with the error. - * - * Note, that since this function applies the `iteratee` to each item in - * parallel, there is no guarantee that the `iteratee` functions will complete - * in order. However, the results array will be in the same order as the - * original `coll`. - * - * If `map` is passed an Object, the results will be an Array. The results - * will roughly be in the order of the original Objects' keys (but this can - * vary across JavaScript engines). - * - * @name map - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an Array of the - * transformed items from the `coll`. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - * @example - * - * async.map(['file1','file2','file3'], fs.stat, function(err, results) { - * // results is now an array of stats for each file - * }); - */ - function map (coll, iteratee, callback) { - return _asyncMap(eachOf$1, coll, iteratee, callback) + if (!auth && parsed.auth) { + var urlAuth = parsed.auth.split(':'); + var urlUsername = urlAuth[0] || ''; + var urlPassword = urlAuth[1] || ''; + auth = urlUsername + ':' + urlPassword; } - var map$1 = awaitify(map, 3); - - /** - * Applies the provided arguments to each function in the array, calling - * `callback` after all functions have completed. If you only provide the first - * argument, `fns`, then it will return a function which lets you pass in the - * arguments as if it were a single function call. If more arguments are - * provided, `callback` is required while `args` is still optional. The results - * for each of the applied async functions are passed to the final callback - * as an array. - * - * @name applyEach - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s - * to all call with the same arguments - * @param {...*} [args] - any number of separate arguments to pass to the - * function. - * @param {Function} [callback] - the final argument should be the callback, - * called when all functions have completed processing. - * @returns {AsyncFunction} - Returns a function that takes no args other than - * an optional callback, that is the result of applying the `args` to each - * of the functions. 
- * @example - * - * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') - * - * appliedFn((err, results) => { - * // results[0] is the results for `enableSearch` - * // results[1] is the results for `updateSchema` - * }); - * - * // partial application example: - * async.each( - * buckets, - * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), - * callback - * ); - */ - var applyEach$1 = applyEach(map$1); - /** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. - * - * @name eachOfSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - */ - function eachOfSeries(coll, iteratee, callback) { - return eachOfLimit$2(coll, 1, iteratee, callback) + if (auth) { + delete headers.Authorization; } - var eachOfSeries$1 = awaitify(eachOfSeries, 3); - /** - * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. - * - * @name mapSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - */ - function mapSeries (coll, iteratee, callback) { - return _asyncMap(eachOfSeries$1, coll, iteratee, callback) - } - var mapSeries$1 = awaitify(mapSeries, 3); + var isHttpsRequest = isHttps.test(protocol); + var agent = isHttpsRequest ? config.httpsAgent : config.httpAgent; - /** - * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. - * - * @name applyEachSeries - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.applyEach]{@link module:ControlFlow.applyEach} - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all - * call with the same arguments - * @param {...*} [args] - any number of separate arguments to pass to the - * function. - * @param {Function} [callback] - the final argument should be the callback, - * called when all functions have completed processing. - * @returns {AsyncFunction} - A function, that when called, is the result of - * appling the `args` to the list of functions. It takes no args, other than - * a callback. 
- */ - var applyEachSeries = applyEach(mapSeries$1); + var options = { + path: buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, ''), + method: config.method.toUpperCase(), + headers: headers, + agent: agent, + agents: { http: config.httpAgent, https: config.httpsAgent }, + auth: auth + }; - const PROMISE_SYMBOL = Symbol('promiseCallback'); + if (config.socketPath) { + options.socketPath = config.socketPath; + } else { + options.hostname = parsed.hostname; + options.port = parsed.port; + } - function promiseCallback () { - let resolve, reject; - function callback (err, ...args) { - if (err) return reject(err) - resolve(args.length > 1 ? args : args[0]); - } + var proxy = config.proxy; + if (!proxy && proxy !== false) { + var proxyEnv = protocol.slice(0, -1) + '_proxy'; + var proxyUrl = process.env[proxyEnv] || process.env[proxyEnv.toUpperCase()]; + if (proxyUrl) { + var parsedProxyUrl = url.parse(proxyUrl); + var noProxyEnv = process.env.no_proxy || process.env.NO_PROXY; + var shouldProxy = true; - callback[PROMISE_SYMBOL] = new Promise((res, rej) => { - resolve = res, - reject = rej; - }); + if (noProxyEnv) { + var noProxy = noProxyEnv.split(',').map(function trim(s) { + return s.trim(); + }); - return callback - } + shouldProxy = !noProxy.some(function proxyMatch(proxyElement) { + if (!proxyElement) { + return false; + } + if (proxyElement === '*') { + return true; + } + if (proxyElement[0] === '.' && + parsed.hostname.substr(parsed.hostname.length - proxyElement.length) === proxyElement) { + return true; + } - /** - * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on - * their requirements. Each function can optionally depend on other functions - * being completed first, and each function is run as soon as its requirements - * are satisfied. - * - * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence - * will stop. Further tasks will not execute (so any other functions depending - * on it will not run), and the main `callback` is immediately called with the - * error. - * - * {@link AsyncFunction}s also receive an object containing the results of functions which - * have completed so far as the first argument, if they have dependencies. If a - * task function has no dependencies, it will only be passed a callback. - * - * @name auto - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Object} tasks - An object. Each of its properties is either a - * function or an array of requirements, with the {@link AsyncFunction} itself the last item - * in the array. The object's key of a property serves as the name of the task - * defined by that property, i.e. can be used when specifying requirements for - * other tasks. The function receives one or two arguments: - * * a `results` object, containing the results of the previously executed - * functions, only passed if the task has any dependencies, - * * a `callback(err, result)` function, which must be called when finished, - * passing an `error` (which can be `null`) and the result of the function's - * execution. - * @param {number} [concurrency=Infinity] - An optional `integer` for - * determining the maximum number of tasks that can be run in parallel. By - * default, as many as possible. - * @param {Function} [callback] - An optional callback which is called when all - * the tasks have been completed. It receives the `err` argument if any `tasks` - * pass an error to their callback. 
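Proxy selection in the http adapter is handled around here: config.proxy is used when set, otherwise the http_proxy / https_proxy environment variable for the request's protocol is consulted, honouring no_proxy exclusions, and the setProxy() helper further up re-routes any redirect through the same proxy. A hedged sketch of the explicit form follows, using only the fields the adapter reads (protocol, host, port, auth); the URL and proxy host are placeholders.

const axios = require('axios')

axios({
  url: 'http://example.com/data.csv',                 // placeholder URL
  responseType: 'stream',
  proxy: {
    protocol: 'http',
    host: 'proxy.internal',                           // placeholder proxy host
    port: 3128,
    auth: { username: 'user', password: 'secret' },   // optional; sent as Proxy-Authorization: Basic ...
  },
}).then(response => response.data.pipe(process.stdout))

// proxy: false disables both the explicit object and the environment lookup.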
Results are always returned; however, if an - * error occurs, no further `tasks` will be performed, and the results object - * will only contain partial results. Invoked with (err, results). - * @returns {Promise} a promise, if a callback is not passed - * @example - * - * async.auto({ - * // this function will just be passed a callback - * readData: async.apply(fs.readFile, 'data.txt', 'utf-8'), - * showData: ['readData', function(results, cb) { - * // results.readData is the file's contents - * // ... - * }] - * }, callback); - * - * async.auto({ - * get_data: function(callback) { - * console.log('in get_data'); - * // async code to get some data - * callback(null, 'data', 'converted to array'); - * }, - * make_folder: function(callback) { - * console.log('in make_folder'); - * // async code to create a directory to store a file in - * // this is run at the same time as getting the data - * callback(null, 'folder'); - * }, - * write_file: ['get_data', 'make_folder', function(results, callback) { - * console.log('in write_file', JSON.stringify(results)); - * // once there is some data and the directory exists, - * // write the data to a file in the directory - * callback(null, 'filename'); - * }], - * email_link: ['write_file', function(results, callback) { - * console.log('in email_link', JSON.stringify(results)); - * // once the file is written let's email a link to it... - * // results.write_file contains the filename returned by write_file. - * callback(null, {'file':results.write_file, 'email':'user@example.com'}); - * }] - * }, function(err, results) { - * console.log('err = ', err); - * console.log('results = ', results); - * }); - */ - function auto(tasks, concurrency, callback) { - if (typeof concurrency !== 'number') { - // concurrency is optional, shift the args. - callback = concurrency; - concurrency = null; - } - callback = once(callback || promiseCallback()); - var numTasks = Object.keys(tasks).length; - if (!numTasks) { - return callback(null); - } - if (!concurrency) { - concurrency = numTasks; + return parsed.hostname === proxyElement; + }); } - var results = {}; - var runningTasks = 0; - var canceled = false; - var hasError = false; + if (shouldProxy) { + proxy = { + host: parsedProxyUrl.hostname, + port: parsedProxyUrl.port, + protocol: parsedProxyUrl.protocol + }; - var listeners = Object.create(null); + if (parsedProxyUrl.auth) { + var proxyUrlAuth = parsedProxyUrl.auth.split(':'); + proxy.auth = { + username: proxyUrlAuth[0], + password: proxyUrlAuth[1] + }; + } + } + } + } - var readyTasks = []; + if (proxy) { + options.headers.host = parsed.hostname + (parsed.port ? ':' + parsed.port : ''); + setProxy(options, proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path); + } - // for cycle detection: - var readyToCheck = []; // tasks that have been identified as reachable - // without the possibility of returning to an ancestor task - var uncheckedDependencies = {}; + var transport; + var isHttpsProxy = isHttpsRequest && (proxy ? isHttps.test(proxy.protocol) : true); + if (config.transport) { + transport = config.transport; + } else if (config.maxRedirects === 0) { + transport = isHttpsProxy ? https : http; + } else { + if (config.maxRedirects) { + options.maxRedirects = config.maxRedirects; + } + transport = isHttpsProxy ? 
httpsFollow : httpFollow; + } - Object.keys(tasks).forEach(key => { - var task = tasks[key]; - if (!Array.isArray(task)) { - // no dependencies - enqueueTask(key, [task]); - readyToCheck.push(key); - return; - } + if (config.maxBodyLength > -1) { + options.maxBodyLength = config.maxBodyLength; + } - var dependencies = task.slice(0, task.length - 1); - var remainingDependencies = dependencies.length; - if (remainingDependencies === 0) { - enqueueTask(key, task); - readyToCheck.push(key); - return; - } - uncheckedDependencies[key] = remainingDependencies; + // Create the request + var req = transport.request(options, function handleResponse(res) { + if (req.aborted) return; - dependencies.forEach(dependencyName => { - if (!tasks[dependencyName]) { - throw new Error('async.auto task `' + key + - '` has a non-existent dependency `' + - dependencyName + '` in ' + - dependencies.join(', ')); - } - addListener(dependencyName, () => { - remainingDependencies--; - if (remainingDependencies === 0) { - enqueueTask(key, task); - } - }); - }); - }); + // uncompress the response body transparently if required + var stream = res; - checkForDeadlocks(); - processQueue(); + // return the last request in case of redirects + var lastRequest = res.req || req; - function enqueueTask(key, task) { - readyTasks.push(() => runTask(key, task)); - } - function processQueue() { - if (canceled) return - if (readyTasks.length === 0 && runningTasks === 0) { - return callback(null, results); - } - while(readyTasks.length && runningTasks < concurrency) { - var run = readyTasks.shift(); - run(); - } + // if no content, is HEAD request or decompress disabled we should not decompress + if (res.statusCode !== 204 && lastRequest.method !== 'HEAD' && config.decompress !== false) { + switch (res.headers['content-encoding']) { + /*eslint default-case:0*/ + case 'gzip': + case 'compress': + case 'deflate': + // add the unzipper to the body stream processing pipeline + stream = stream.pipe(zlib.createUnzip()); + // remove the content-encoding in order to not confuse downstream operations + delete res.headers['content-encoding']; + break; } + } - function addListener(taskName, fn) { - var taskListeners = listeners[taskName]; - if (!taskListeners) { - taskListeners = listeners[taskName] = []; - } + var response = { + status: res.statusCode, + statusText: res.statusMessage, + headers: res.headers, + config: config, + request: lastRequest + }; - taskListeners.push(fn); - } + if (config.responseType === 'stream') { + response.data = stream; + settle(resolve, reject, response); + } else { + var responseBuffer = []; + stream.on('data', function handleStreamData(chunk) { + responseBuffer.push(chunk); - function taskComplete(taskName) { - var taskListeners = listeners[taskName] || []; - taskListeners.forEach(fn => fn()); - processQueue(); - } + // make sure the content length is not over the maxContentLength if specified + if (config.maxContentLength > -1 && Buffer.concat(responseBuffer).length > config.maxContentLength) { + stream.destroy(); + reject(createError('maxContentLength size of ' + config.maxContentLength + ' exceeded', + config, null, lastRequest)); + } + }); + stream.on('error', function handleStreamError(err) { + if (req.aborted) return; + reject(enhanceError(err, config, null, lastRequest)); + }); - function runTask(key, task) { - if (hasError) return; + stream.on('end', function handleStreamEnd() { + var responseData = Buffer.concat(responseBuffer); + if (config.responseType !== 'arraybuffer') { + responseData = 
responseData.toString(config.responseEncoding); + if (!config.responseEncoding || config.responseEncoding === 'utf8') { + responseData = utils.stripBOM(responseData); + } + } - var taskCallback = onlyOnce((err, ...result) => { - runningTasks--; - if (err === false) { - canceled = true; - return - } - if (result.length < 2) { - [result] = result; - } - if (err) { - var safeResults = {}; - Object.keys(results).forEach(rkey => { - safeResults[rkey] = results[rkey]; - }); - safeResults[key] = result; - hasError = true; - listeners = Object.create(null); - if (canceled) return - callback(err, safeResults); - } else { - results[key] = result; - taskComplete(key); - } - }); + response.data = responseData; + settle(resolve, reject, response); + }); + } + }); - runningTasks++; - var taskFn = wrapAsync(task[task.length - 1]); - if (task.length > 1) { - taskFn(results, taskCallback); - } else { - taskFn(taskCallback); - } - } + // Handle errors + req.on('error', function handleRequestError(err) { + if (req.aborted && err.code !== 'ERR_FR_TOO_MANY_REDIRECTS') return; + reject(enhanceError(err, config, null, req)); + }); - function checkForDeadlocks() { - // Kahn's algorithm - // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm - // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html - var currentTask; - var counter = 0; - while (readyToCheck.length) { - currentTask = readyToCheck.pop(); - counter++; - getDependents(currentTask).forEach(dependent => { - if (--uncheckedDependencies[dependent] === 0) { - readyToCheck.push(dependent); - } - }); - } + // Handle request timeout + if (config.timeout) { + // Sometime, the response will be very slow, and does not respond, the connect event will be block by event loop system. + // And timer callback will be fired, and abort() will be invoked before connection, then get "socket hang up" and code ECONNRESET. + // At this time, if we have a large number of request, nodejs will hang up some socket on background. and the number will up and up. + // And then these socket which be hang up will devoring CPU little by little. + // ClientRequest.setTimeout will be fired on the specify milliseconds, and can make sure that abort() will be fired after connect. 
+ req.setTimeout(config.timeout, function handleRequestTimeout() { + req.abort(); + reject(createError('timeout of ' + config.timeout + 'ms exceeded', config, 'ECONNABORTED', req)); + }); + } - if (counter !== numTasks) { - throw new Error( - 'async.auto cannot execute tasks due to a recursive dependency' - ); - } - } + if (config.cancelToken) { + // Handle cancellation + config.cancelToken.promise.then(function onCanceled(cancel) { + if (req.aborted) return; - function getDependents(taskName) { - var result = []; - Object.keys(tasks).forEach(key => { - const task = tasks[key]; - if (Array.isArray(task) && task.indexOf(taskName) >= 0) { - result.push(key); - } - }); - return result; - } + req.abort(); + reject(cancel); + }); + } - return callback[PROMISE_SYMBOL] + // Send the request + if (utils.isStream(data)) { + data.on('error', function handleStreamError(err) { + reject(enhanceError(err, config, null, req)); + }).pipe(req); + } else { + req.end(data); } + }); +}; - var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; - var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; - var FN_ARG_SPLIT = /,/; - var FN_ARG = /(=.+)?(\s*)$/; - var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg; - function parseParams(func) { - const src = func.toString().replace(STRIP_COMMENTS, ''); - let match = src.match(FN_ARGS); - if (!match) { - match = src.match(ARROW_FN_ARGS); - } - if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) - let [, args] = match; - return args - .replace(/\s/g, '') - .split(FN_ARG_SPLIT) - .map((arg) => arg.replace(FN_ARG, '').trim()); - } +/***/ }), - /** - * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent - * tasks are specified as parameters to the function, after the usual callback - * parameter, with the parameter names matching the names of the tasks it - * depends on. This can provide even more readable task graphs which can be - * easier to maintain. - * - * If a final callback is specified, the task results are similarly injected, - * specified as named parameters after the initial error parameter. - * - * The autoInject function is purely syntactic sugar and its semantics are - * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. - * - * @name autoInject - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.auto]{@link module:ControlFlow.auto} - * @category Control Flow - * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of - * the form 'func([dependencies...], callback). The object's key of a property - * serves as the name of the task defined by that property, i.e. can be used - * when specifying requirements for other tasks. - * * The `callback` parameter is a `callback(err, result)` which must be called - * when finished, passing an `error` (which can be `null`) and the result of - * the function's execution. The remaining parameters name other tasks on - * which the task is dependent, and the results from those tasks are the - * arguments of those parameters. - * @param {Function} [callback] - An optional callback which is called when all - * the tasks have been completed. It receives the `err` argument if any `tasks` - * pass an error to their callback, and a `results` object with any completed - * task results, similar to `auto`. 
- * @returns {Promise} a promise, if no callback is passed - * @example - * - * // The example from `auto` can be rewritten as follows: - * async.autoInject({ - * get_data: function(callback) { - * // async code to get some data - * callback(null, 'data', 'converted to array'); - * }, - * make_folder: function(callback) { - * // async code to create a directory to store a file in - * // this is run at the same time as getting the data - * callback(null, 'folder'); - * }, - * write_file: function(get_data, make_folder, callback) { - * // once there is some data and the directory exists, - * // write the data to a file in the directory - * callback(null, 'filename'); - * }, - * email_link: function(write_file, callback) { - * // once the file is written let's email a link to it... - * // write_file contains the filename returned by write_file. - * callback(null, {'file':write_file, 'email':'user@example.com'}); - * } - * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); - * - * // If you are using a JS minifier that mangles parameter names, `autoInject` - * // will not work with plain functions, since the parameter names will be - * // collapsed to a single letter identifier. To work around this, you can - * // explicitly specify the names of the parameters your task function needs - * // in an array, similar to Angular.js dependency injection. - * - * // This still has an advantage over plain `auto`, since the results a task - * // depends on are still spread into arguments. - * async.autoInject({ - * //... - * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { - * callback(null, 'filename'); - * }], - * email_link: ['write_file', function(write_file, callback) { - * callback(null, {'file':write_file, 'email':'user@example.com'}); - * }] - * //... - * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); - */ - function autoInject(tasks, callback) { - var newTasks = {}; +/***/ 3454: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - Object.keys(tasks).forEach(key => { - var taskFn = tasks[key]; - var params; - var fnIsAsync = isAsync(taskFn); - var hasNoDeps = - (!fnIsAsync && taskFn.length === 1) || - (fnIsAsync && taskFn.length === 0); +"use strict"; - if (Array.isArray(taskFn)) { - params = [...taskFn]; - taskFn = params.pop(); - newTasks[key] = params.concat(params.length > 0 ? 
newTask : taskFn); - } else if (hasNoDeps) { - // no dependencies, use the function as-is - newTasks[key] = taskFn; - } else { - params = parseParams(taskFn); - if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { - throw new Error("autoInject task functions require explicit parameters."); - } +var utils = __nccwpck_require__(20328); +var settle = __nccwpck_require__(13211); +var cookies = __nccwpck_require__(21545); +var buildURL = __nccwpck_require__(30646); +var buildFullPath = __nccwpck_require__(41934); +var parseHeaders = __nccwpck_require__(86455); +var isURLSameOrigin = __nccwpck_require__(33608); +var createError = __nccwpck_require__(15226); - // remove callback param - if (!fnIsAsync) params.pop(); +module.exports = function xhrAdapter(config) { + return new Promise(function dispatchXhrRequest(resolve, reject) { + var requestData = config.data; + var requestHeaders = config.headers; - newTasks[key] = params.concat(newTask); - } + if (utils.isFormData(requestData)) { + delete requestHeaders['Content-Type']; // Let the browser set it + } - function newTask(results, taskCb) { - var newArgs = params.map(name => results[name]); - newArgs.push(taskCb); - wrapAsync(taskFn)(...newArgs); - } - }); + var request = new XMLHttpRequest(); - return auto(newTasks, callback); + // HTTP basic authentication + if (config.auth) { + var username = config.auth.username || ''; + var password = config.auth.password ? unescape(encodeURIComponent(config.auth.password)) : ''; + requestHeaders.Authorization = 'Basic ' + btoa(username + ':' + password); } - // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation - // used for queues. This implementation assumes that the node provided by the user can be modified - // to adjust the next and last properties. We implement only the minimal functionality - // for queue support. - class DLL { - constructor() { - this.head = this.tail = null; - this.length = 0; - } + var fullPath = buildFullPath(config.baseURL, config.url); + request.open(config.method.toUpperCase(), buildURL(fullPath, config.params, config.paramsSerializer), true); - removeLink(node) { - if (node.prev) node.prev.next = node.next; - else this.head = node.next; - if (node.next) node.next.prev = node.prev; - else this.tail = node.prev; + // Set the request timeout in MS + request.timeout = config.timeout; - node.prev = node.next = null; - this.length -= 1; - return node; - } + // Listen for ready state + request.onreadystatechange = function handleLoad() { + if (!request || request.readyState !== 4) { + return; + } - empty () { - while(this.head) this.shift(); - return this; - } + // The request errored out and we didn't get a response, this will be + // handled by onerror instead + // With one exception: request that using file: protocol, most browsers + // will return status as 0 even though it's a successful request + if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) { + return; + } - insertAfter(node, newNode) { - newNode.prev = node; - newNode.next = node.next; - if (node.next) node.next.prev = newNode; - else this.tail = newNode; - node.next = newNode; - this.length += 1; - } + // Prepare the response + var responseHeaders = 'getAllResponseHeaders' in request ? parseHeaders(request.getAllResponseHeaders()) : null; + var responseData = !config.responseType || config.responseType === 'text' ? 
request.responseText : request.response; + var response = { + data: responseData, + status: request.status, + statusText: request.statusText, + headers: responseHeaders, + config: config, + request: request + }; - insertBefore(node, newNode) { - newNode.prev = node.prev; - newNode.next = node; - if (node.prev) node.prev.next = newNode; - else this.head = newNode; - node.prev = newNode; - this.length += 1; - } + settle(resolve, reject, response); - unshift(node) { - if (this.head) this.insertBefore(this.head, node); - else setInitial(this, node); - } + // Clean up request + request = null; + }; - push(node) { - if (this.tail) this.insertAfter(this.tail, node); - else setInitial(this, node); - } + // Handle browser request cancellation (as opposed to a manual cancellation) + request.onabort = function handleAbort() { + if (!request) { + return; + } - shift() { - return this.head && this.removeLink(this.head); - } + reject(createError('Request aborted', config, 'ECONNABORTED', request)); - pop() { - return this.tail && this.removeLink(this.tail); - } + // Clean up request + request = null; + }; - toArray() { - return [...this] - } + // Handle low level network errors + request.onerror = function handleError() { + // Real errors are hidden from us by the browser + // onerror should only fire if it's a network error + reject(createError('Network Error', config, null, request)); - *[Symbol.iterator] () { - var cur = this.head; - while (cur) { - yield cur.data; - cur = cur.next; - } - } + // Clean up request + request = null; + }; - remove (testFn) { - var curr = this.head; - while(curr) { - var {next} = curr; - if (testFn(curr)) { - this.removeLink(curr); - } - curr = next; - } - return this; - } - } + // Handle timeout + request.ontimeout = function handleTimeout() { + var timeoutErrorMessage = 'timeout of ' + config.timeout + 'ms exceeded'; + if (config.timeoutErrorMessage) { + timeoutErrorMessage = config.timeoutErrorMessage; + } + reject(createError(timeoutErrorMessage, config, 'ECONNABORTED', + request)); - function setInitial(dll, node) { - dll.length = 1; - dll.head = dll.tail = node; + // Clean up request + request = null; + }; + + // Add xsrf header + // This is only done if running in a standard browser environment. + // Specifically not if we're in a web worker, or react-native. + if (utils.isStandardBrowserEnv()) { + // Add xsrf header + var xsrfValue = (config.withCredentials || isURLSameOrigin(fullPath)) && config.xsrfCookieName ? 
+ cookies.read(config.xsrfCookieName) : + undefined; + + if (xsrfValue) { + requestHeaders[config.xsrfHeaderName] = xsrfValue; + } } - function queue(worker, concurrency, payload) { - if (concurrency == null) { - concurrency = 1; - } - else if(concurrency === 0) { - throw new RangeError('Concurrency must not be zero'); + // Add headers to the request + if ('setRequestHeader' in request) { + utils.forEach(requestHeaders, function setRequestHeader(val, key) { + if (typeof requestData === 'undefined' && key.toLowerCase() === 'content-type') { + // Remove Content-Type if data is undefined + delete requestHeaders[key]; + } else { + // Otherwise add header to the request + request.setRequestHeader(key, val); } + }); + } - var _worker = wrapAsync(worker); - var numRunning = 0; - var workersList = []; - const events = { - error: [], - drain: [], - saturated: [], - unsaturated: [], - empty: [] - }; + // Add withCredentials to request if needed + if (!utils.isUndefined(config.withCredentials)) { + request.withCredentials = !!config.withCredentials; + } - function on (event, handler) { - events[event].push(handler); + // Add responseType to request if needed + if (config.responseType) { + try { + request.responseType = config.responseType; + } catch (e) { + // Expected DOMException thrown by browsers not compatible XMLHttpRequest Level 2. + // But, this can be suppressed for 'json' type as it can be parsed by default 'transformResponse' function. + if (config.responseType !== 'json') { + throw e; } + } + } - function once (event, handler) { - const handleAndRemove = (...args) => { - off(event, handleAndRemove); - handler(...args); - }; - events[event].push(handleAndRemove); - } + // Handle progress if needed + if (typeof config.onDownloadProgress === 'function') { + request.addEventListener('progress', config.onDownloadProgress); + } - function off (event, handler) { - if (!event) return Object.keys(events).forEach(ev => events[ev] = []) - if (!handler) return events[event] = [] - events[event] = events[event].filter(ev => ev !== handler); - } + // Not all browsers support upload events + if (typeof config.onUploadProgress === 'function' && request.upload) { + request.upload.addEventListener('progress', config.onUploadProgress); + } - function trigger (event, ...args) { - events[event].forEach(handler => handler(...args)); + if (config.cancelToken) { + // Handle cancellation + config.cancelToken.promise.then(function onCanceled(cancel) { + if (!request) { + return; } - var processingScheduled = false; - function _insert(data, insertAtFront, rejectOnError, callback) { - if (callback != null && typeof callback !== 'function') { - throw new Error('task callback must be a function'); - } - q.started = true; + request.abort(); + reject(cancel); + // Clean up request + request = null; + }); + } - var res, rej; - function promiseCallback (err, ...args) { - // we don't care about the error, let the global error handler - // deal with it - if (err) return rejectOnError ? rej(err) : res() - if (args.length <= 1) return res(args[0]) - res(args); - } + if (!requestData) { + requestData = null; + } - var item = { - data, - callback: rejectOnError ? 
- promiseCallback : - (callback || promiseCallback) - }; + // Send the request + request.send(requestData); + }); +}; - if (insertAtFront) { - q._tasks.unshift(item); - } else { - q._tasks.push(item); - } - if (!processingScheduled) { - processingScheduled = true; - setImmediate$1(() => { - processingScheduled = false; - q.process(); - }); - } +/***/ }), - if (rejectOnError || !callback) { - return new Promise((resolve, reject) => { - res = resolve; - rej = reject; - }) - } - } +/***/ 52618: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - function _createCB(tasks) { - return function (err, ...args) { - numRunning -= 1; +"use strict"; - for (var i = 0, l = tasks.length; i < l; i++) { - var task = tasks[i]; - var index = workersList.indexOf(task); - if (index === 0) { - workersList.shift(); - } else if (index > 0) { - workersList.splice(index, 1); - } +var utils = __nccwpck_require__(20328); +var bind = __nccwpck_require__(77065); +var Axios = __nccwpck_require__(98178); +var mergeConfig = __nccwpck_require__(74831); +var defaults = __nccwpck_require__(98190); - task.callback(err, ...args); +/** + * Create an instance of Axios + * + * @param {Object} defaultConfig The default config for the instance + * @return {Axios} A new instance of Axios + */ +function createInstance(defaultConfig) { + var context = new Axios(defaultConfig); + var instance = bind(Axios.prototype.request, context); - if (err != null) { - trigger('error', err, task.data); - } - } + // Copy axios.prototype to instance + utils.extend(instance, Axios.prototype, context); - if (numRunning <= (q.concurrency - q.buffer) ) { - trigger('unsaturated'); - } - - if (q.idle()) { - trigger('drain'); - } - q.process(); - }; - } + // Copy context to instance + utils.extend(instance, context); - function _maybeDrain(data) { - if (data.length === 0 && q.idle()) { - // call drain immediately if there are no tasks - setImmediate$1(() => trigger('drain')); - return true - } - return false - } + return instance; +} - const eventMethod = (name) => (handler) => { - if (!handler) { - return new Promise((resolve, reject) => { - once(name, (err, data) => { - if (err) return reject(err) - resolve(data); - }); - }) - } - off(name); - on(name, handler); +// Create the default instance to be exported +var axios = createInstance(defaults); - }; +// Expose Axios class to allow class inheritance +axios.Axios = Axios; - var isProcessing = false; - var q = { - _tasks: new DLL(), - *[Symbol.iterator] () { - yield* q._tasks[Symbol.iterator](); - }, - concurrency, - payload, - buffer: concurrency / 4, - started: false, - paused: false, - push (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, false, false, callback)) - } - return _insert(data, false, false, callback); - }, - pushAsync (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, false, true, callback)) - } - return _insert(data, false, true, callback); - }, - kill () { - off(); - q._tasks.empty(); - }, - unshift (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, true, false, callback)) - } - return _insert(data, true, false, callback); - }, - unshiftAsync (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, true, true, callback)) - } - return _insert(data, true, true, callback); - }, - remove (testFn) { 
- q._tasks.remove(testFn); - }, - process () { - // Avoid trying to start too many processing operations. This can occur - // when callbacks resolve synchronously (#1267). - if (isProcessing) { - return; - } - isProcessing = true; - while(!q.paused && numRunning < q.concurrency && q._tasks.length){ - var tasks = [], data = []; - var l = q._tasks.length; - if (q.payload) l = Math.min(l, q.payload); - for (var i = 0; i < l; i++) { - var node = q._tasks.shift(); - tasks.push(node); - workersList.push(node); - data.push(node.data); - } +// Factory for creating new instances +axios.create = function create(instanceConfig) { + return createInstance(mergeConfig(axios.defaults, instanceConfig)); +}; - numRunning += 1; +// Expose Cancel & CancelToken +axios.Cancel = __nccwpck_require__(98875); +axios.CancelToken = __nccwpck_require__(71587); +axios.isCancel = __nccwpck_require__(64057); - if (q._tasks.length === 0) { - trigger('empty'); - } +// Expose all/spread +axios.all = function all(promises) { + return Promise.all(promises); +}; +axios.spread = __nccwpck_require__(74850); - if (numRunning === q.concurrency) { - trigger('saturated'); - } +// Expose isAxiosError +axios.isAxiosError = __nccwpck_require__(60650); - var cb = onlyOnce(_createCB(tasks)); - _worker(data, cb); - } - isProcessing = false; - }, - length () { - return q._tasks.length; - }, - running () { - return numRunning; - }, - workersList () { - return workersList; - }, - idle() { - return q._tasks.length + numRunning === 0; - }, - pause () { - q.paused = true; - }, - resume () { - if (q.paused === false) { return; } - q.paused = false; - setImmediate$1(q.process); - } - }; - // define these as fixed properties, so people get useful errors when updating - Object.defineProperties(q, { - saturated: { - writable: false, - value: eventMethod('saturated') - }, - unsaturated: { - writable: false, - value: eventMethod('unsaturated') - }, - empty: { - writable: false, - value: eventMethod('empty') - }, - drain: { - writable: false, - value: eventMethod('drain') - }, - error: { - writable: false, - value: eventMethod('error') - }, - }); - return q; - } +module.exports = axios; - /** - * Creates a `cargo` object with the specified payload. Tasks added to the - * cargo will be processed altogether (up to the `payload` limit). If the - * `worker` is in progress, the task is queued until it becomes available. Once - * the `worker` has completed some tasks, each callback of those tasks is - * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) - * for how `cargo` and `queue` work. - * - * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers - * at a time, cargo passes an array of tasks to a single worker, repeating - * when the worker is finished. - * - * @name cargo - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @category Control Flow - * @param {AsyncFunction} worker - An asynchronous function for processing an array - * of queued tasks. 
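// Illustrative aside, not part of the generated bundle: the xhrAdapter module added
// above maps standard axios request-config fields onto an XMLHttpRequest. `auth` is
// turned into a Basic Authorization header, `withCredentials` and `responseType` are
// copied onto the request, the progress callbacks become event listeners, and a
// `cancelToken` aborts the request. A config exercising those fields might look like
// this (hypothetical URL; the Action itself runs under Node, where axios picks its
// http adapter rather than this XHR one):
var axios = require('axios'); // assumed standard entry point
axios.get('https://example.test/data.csv', {
  auth: { username: 'user', password: 'secret' }, // becomes Authorization: Basic ...
  withCredentials: true,                          // copied to request.withCredentials
  responseType: 'text',                           // copied to request.responseType
  onDownloadProgress: function (evt) {            // attached as a 'progress' listener
    console.log('received', evt.loaded, 'bytes');
  }
});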
Invoked with `(tasks, callback)`. - * @param {number} [payload=Infinity] - An optional `integer` for determining - * how many tasks should be processed per round; if omitted, the default is - * unlimited. - * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can - * attached as certain properties to listen for specific events during the - * lifecycle of the cargo and inner queue. - * @example - * - * // create a cargo object with payload 2 - * var cargo = async.cargo(function(tasks, callback) { - * for (var i=0; i { - _iteratee(memo, x, (err, v) => { - memo = v; - iterCb(err); - }); - }, err => callback(err, memo)); - } - var reduce$1 = awaitify(reduce, 4); +/***/ }), - /** - * Version of the compose function that is more natural to read. Each function - * consumes the return value of the previous function. It is the equivalent of - * [compose]{@link module:ControlFlow.compose} with the arguments reversed. - * - * Each function is executed with the `this` binding of the composed function. - * - * @name seq - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.compose]{@link module:ControlFlow.compose} - * @category Control Flow - * @param {...AsyncFunction} functions - the asynchronous functions to compose - * @returns {Function} a function that composes the `functions` in order - * @example - * - * // Requires lodash (or underscore), express3 and dresende's orm2. - * // Part of an app, that fetches cats of the logged user. - * // This example uses `seq` function to avoid overnesting and error - * // handling clutter. - * app.get('/cats', function(request, response) { - * var User = request.models.User; - * async.seq( - * _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data)) - * function(user, fn) { - * user.getCats(fn); // 'getCats' has signature (callback(err, data)) - * } - * )(req.session.user_id, function (err, cats) { - * if (err) { - * console.error(err); - * response.json({ status: 'error', message: err.message }); - * } else { - * response.json({ status: 'ok', message: 'Cats found', data: cats }); - * } - * }); - * }); - */ - function seq(...functions) { - var _functions = functions.map(wrapAsync); - return function (...args) { - var that = this; +/***/ 98875: +/***/ ((module) => { - var cb = args[args.length - 1]; - if (typeof cb == 'function') { - args.pop(); - } else { - cb = promiseCallback(); - } +"use strict"; - reduce$1(_functions, args, (newargs, fn, iterCb) => { - fn.apply(that, newargs.concat((err, ...nextargs) => { - iterCb(err, nextargs); - })); - }, - (err, results) => cb(err, ...results)); - return cb[PROMISE_SYMBOL] - }; - } +/** + * A `Cancel` is an object that is thrown when an operation is canceled. + * + * @class + * @param {string=} message The message. + */ +function Cancel(message) { + this.message = message; +} - /** - * Creates a function which is a composition of the passed asynchronous - * functions. Each function consumes the return value of the function that - * follows. Composing functions `f()`, `g()`, and `h()` would produce the result - * of `f(g(h()))`, only this version uses callbacks to obtain the return values. - * - * If the last argument to the composed function is not a function, a promise - * is returned when you call it. - * - * Each function is executed with the `this` binding of the composed function. 
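// Illustrative aside, not part of the generated bundle: the axios entry module shown
// above builds the callable export with createInstance() and hangs the public helpers
// (create, all, spread, isAxiosError, plus Cancel/CancelToken/isCancel) off it. A
// minimal usage sketch, assuming the standard `axios` export and a hypothetical host:
var axios = require('axios');
var api = axios.create({ baseURL: 'https://example.test', timeout: 5000 });
axios.all([api.get('/a'), api.get('/b')])      // axios.all is just Promise.all
  .then(axios.spread(function (a, b) {         // spread unpacks the result array
    console.log(a.status, b.status);
  }));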
- * - * @name compose - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {...AsyncFunction} functions - the asynchronous functions to compose - * @returns {Function} an asynchronous function that is the composed - * asynchronous `functions` - * @example - * - * function add1(n, callback) { - * setTimeout(function () { - * callback(null, n + 1); - * }, 10); - * } - * - * function mul3(n, callback) { - * setTimeout(function () { - * callback(null, n * 3); - * }, 10); - * } - * - * var add1mul3 = async.compose(mul3, add1); - * add1mul3(4, function (err, result) { - * // result now equals 15 - * }); - */ - function compose(...args) { - return seq(...args.reverse()); - } +Cancel.prototype.toString = function toString() { + return 'Cancel' + (this.message ? ': ' + this.message : ''); +}; - /** - * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. - * - * @name mapLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - */ - function mapLimit (coll, limit, iteratee, callback) { - return _asyncMap(eachOfLimit(limit), coll, iteratee, callback) - } - var mapLimit$1 = awaitify(mapLimit, 4); +Cancel.prototype.__CANCEL__ = true; - /** - * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. - * - * @name concatLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.concat]{@link module:Collections.concat} - * @category Collection - * @alias flatMapLimit - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, - * which should use an array as its result. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is an array - * containing the concatenated results of the `iteratee` function. Invoked with - * (err, results). - * @returns A Promise, if no callback is passed - */ - function concatLimit(coll, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return mapLimit$1(coll, limit, (val, iterCb) => { - _iteratee(val, (err, ...args) => { - if (err) return iterCb(err); - return iterCb(err, args); - }); - }, (err, mapResults) => { - var result = []; - for (var i = 0; i < mapResults.length; i++) { - if (mapResults[i]) { - result = result.concat(...mapResults[i]); - } - } +module.exports = Cancel; - return callback(err, result); - }); - } - var concatLimit$1 = awaitify(concatLimit, 4); - /** - * Applies `iteratee` to each item in `coll`, concatenating the results. Returns - * the concatenated list. 
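// Illustrative aside, not part of the bundle: unlike most of their siblings, the
// `mapLimit`/`concatLimit` docs above carry no @example. A minimal sketch of the
// `*Limit` pattern, assuming the bundled `async` library and hypothetical files;
// the extra `limit` argument caps how many iteratee calls run at once:
var async = require('async');
var fs = require('fs');
async.mapLimit(['a.json', 'b.json', 'c.json'], 2, function (file, cb) {
  fs.readFile(file, 'utf8', cb);               // at most two reads in flight
}, function (err, contents) {
  // contents keeps the original input order, like plain `map`
});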
The `iteratee`s are called in parallel, and the - * results are concatenated as they return. The results array will be returned in - * the original order of `coll` passed to the `iteratee` function. - * - * @name concat - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @alias flatMap - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, - * which should use an array as its result. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is an array - * containing the concatenated results of the `iteratee` function. Invoked with - * (err, results). - * @returns A Promise, if no callback is passed - * @example - * - * async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files) { - * // files is now a list of filenames that exist in the 3 directories - * }); - */ - function concat(coll, iteratee, callback) { - return concatLimit$1(coll, Infinity, iteratee, callback) - } - var concat$1 = awaitify(concat, 3); +/***/ }), - /** - * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. - * - * @name concatSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.concat]{@link module:Collections.concat} - * @category Collection - * @alias flatMapSeries - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. - * The iteratee should complete with an array an array of results. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is an array - * containing the concatenated results of the `iteratee` function. Invoked with - * (err, results). - * @returns A Promise, if no callback is passed - */ - function concatSeries(coll, iteratee, callback) { - return concatLimit$1(coll, 1, iteratee, callback) - } - var concatSeries$1 = awaitify(concatSeries, 3); +/***/ 71587: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /** - * Returns a function that when called, calls-back with the values provided. - * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to - * [`auto`]{@link module:ControlFlow.auto}. - * - * @name constant - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {...*} arguments... - Any number of arguments to automatically invoke - * callback with. - * @returns {AsyncFunction} Returns a function that when invoked, automatically - * invokes the callback with the previous given arguments. - * @example - * - * async.waterfall([ - * async.constant(42), - * function (value, next) { - * // value === 42 - * }, - * //... - * ], callback); - * - * async.waterfall([ - * async.constant(filename, "utf8"), - * fs.readFile, - * function (fileData, next) { - * //... - * } - * //... - * ], callback); - * - * async.auto({ - * hostname: async.constant("https://server.net/"), - * port: findFreePort, - * launchServer: ["hostname", "port", function (options, cb) { - * startServer(options, cb); - * }], - * //... 
- * }, callback); - */ - function constant(...args) { - return function (...ignoredArgs/*, callback*/) { - var callback = ignoredArgs.pop(); - return callback(null, ...args); - }; - } +"use strict"; - function _createTester(check, getResult) { - return (eachfn, arr, _iteratee, cb) => { - var testPassed = false; - var testResult; - const iteratee = wrapAsync(_iteratee); - eachfn(arr, (value, _, callback) => { - iteratee(value, (err, result) => { - if (err || err === false) return callback(err); - if (check(result) && !testResult) { - testPassed = true; - testResult = getResult(true, value); - return callback(null, breakLoop); - } - callback(); - }); - }, err => { - if (err) return cb(err); - cb(null, testPassed ? testResult : getResult(false)); - }); - }; - } +var Cancel = __nccwpck_require__(98875); - /** - * Returns the first value in `coll` that passes an async truth test. The - * `iteratee` is applied in parallel, meaning the first iteratee to return - * `true` will fire the detect `callback` with that result. That means the - * result might not be the first item in the original `coll` (in terms of order) - * that passes the test. +/** + * A `CancelToken` is an object that can be used to request cancellation of an operation. + * + * @class + * @param {Function} executor The executor function. + */ +function CancelToken(executor) { + if (typeof executor !== 'function') { + throw new TypeError('executor must be a function.'); + } - * If order within the original `coll` is important, then look at - * [`detectSeries`]{@link module:Collections.detectSeries}. - * - * @name detect - * @static - * @memberOf module:Collections - * @method - * @alias find - * @category Collections - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @returns A Promise, if no callback is passed - * @example - * - * async.detect(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // result now equals the first file in the list that exists - * }); - */ - function detect(coll, iteratee, callback) { - return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) - } - var detect$1 = awaitify(detect, 3); + var resolvePromise; + this.promise = new Promise(function promiseExecutor(resolve) { + resolvePromise = resolve; + }); - /** - * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a - * time. - * - * @name detectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findLimit - * @category Collections - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. 
- * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @returns a Promise if no callback is passed - */ - function detectLimit(coll, limit, iteratee, callback) { - return _createTester(bool => bool, (res, item) => item)(eachOfLimit(limit), coll, iteratee, callback) + var token = this; + executor(function cancel(message) { + if (token.reason) { + // Cancellation has already been requested + return; } - var detectLimit$1 = awaitify(detectLimit, 4); - /** - * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. - * - * @name detectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findSeries - * @category Collections - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @returns a Promise if no callback is passed - */ - function detectSeries(coll, iteratee, callback) { - return _createTester(bool => bool, (res, item) => item)(eachOfLimit(1), coll, iteratee, callback) - } + token.reason = new Cancel(message); + resolvePromise(token.reason); + }); +} - var detectSeries$1 = awaitify(detectSeries, 3); +/** + * Throws a `Cancel` if cancellation has been requested. + */ +CancelToken.prototype.throwIfRequested = function throwIfRequested() { + if (this.reason) { + throw this.reason; + } +}; - function consoleFunc(name) { - return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { - if (typeof console === 'object') { - if (err) { - if (console.error) { - console.error(err); - } - } else if (console[name]) { - resultArgs.forEach(x => console[name](x)); - } - } - }) - } +/** + * Returns an object that contains a new `CancelToken` and a function that, when called, + * cancels the `CancelToken`. + */ +CancelToken.source = function source() { + var cancel; + var token = new CancelToken(function executor(c) { + cancel = c; + }); + return { + token: token, + cancel: cancel + }; +}; - /** - * Logs the result of an [`async` function]{@link AsyncFunction} to the - * `console` using `console.dir` to display the properties of the resulting object. - * Only works in Node.js or in browsers that support `console.dir` and - * `console.error` (such as FF and Chrome). - * If multiple arguments are returned from the async function, - * `console.dir` is called on each argument in order. - * - * @name dir - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} function - The function you want to eventually apply - * all arguments to. - * @param {...*} arguments... - Any number of arguments to apply to the function. 
- * @example - * - * // in a module - * var hello = function(name, callback) { - * setTimeout(function() { - * callback(null, {hello: name}); - * }, 1000); - * }; - * - * // in the node repl - * node> async.dir(hello, 'world'); - * {hello: 'world'} - */ - var dir = consoleFunc('dir'); +module.exports = CancelToken; - /** - * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in - * the order of operations, the arguments `test` and `iteratee` are switched. - * - * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. - * - * @name doWhilst - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {AsyncFunction} iteratee - A function which is called each time `test` - * passes. Invoked with (callback). - * @param {AsyncFunction} test - asynchronous truth test to perform after each - * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the - * non-error args from the previous callback of `iteratee`. - * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `iteratee` has stopped. - * `callback` will be passed an error and any arguments passed to the final - * `iteratee`'s callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if no callback is passed - */ - function doWhilst(iteratee, test, callback) { - callback = onlyOnce(callback); - var _fn = wrapAsync(iteratee); - var _test = wrapAsync(test); - var results; - function next(err, ...args) { - if (err) return callback(err); - if (err === false) return; - results = args; - _test(...args, check); - } +/***/ }), - function check(err, truth) { - if (err) return callback(err); - if (err === false) return; - if (!truth) return callback(null, ...results); - _fn(next); - } +/***/ 64057: +/***/ ((module) => { - return check(null, true); - } +"use strict"; - var doWhilst$1 = awaitify(doWhilst, 3); - /** - * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the - * argument ordering differs from `until`. - * - * @name doUntil - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} - * @category Control Flow - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` fails. Invoked with (callback). - * @param {AsyncFunction} test - asynchronous truth test to perform after each - * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the - * non-error args from the previous callback of `iteratee` - * @param {Function} [callback] - A callback which is called after the test - * function has passed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. 
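// Illustrative aside, not part of the bundle: the CancelToken module above is usually
// consumed through `CancelToken.source()`, and `isCancel` (the small module that
// follows) distinguishes a cancellation from a real failure. A sketch, assuming the
// standard `axios` export and a hypothetical URL:
var axios = require('axios');
var source = axios.CancelToken.source();
axios.get('https://example.test/slow', { cancelToken: source.token })
  .catch(function (err) {
    if (axios.isCancel(err)) {
      console.log('request cancelled:', err.message);
    }
  });
source.cancel('no longer needed'); // resolves token.promise; the adapter then aborts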
Invoked with (err, [results]); - * @returns {Promise} a promise, if no callback is passed - */ - function doUntil(iteratee, test, callback) { - const _test = wrapAsync(test); - return doWhilst$1(iteratee, (...args) => { - const cb = args.pop(); - _test(...args, (err, truth) => cb (err, !truth)); - }, callback); - } +module.exports = function isCancel(value) { + return !!(value && value.__CANCEL__); +}; - function _withoutIndex(iteratee) { - return (value, index, callback) => iteratee(value, callback); - } - /** - * Applies the function `iteratee` to each item in `coll`, in parallel. - * The `iteratee` is called with an item from the list, and a callback for when - * it has finished. If the `iteratee` passes an error to its `callback`, the - * main `callback` (for the `each` function) is immediately called with the - * error. - * - * Note, that since this function applies `iteratee` to each item in parallel, - * there is no guarantee that the iteratee functions will complete in order. - * - * @name each - * @static - * @memberOf module:Collections - * @method - * @alias forEach - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to - * each item in `coll`. Invoked with (item, callback). - * The array index is not passed to the iteratee. - * If you need the index, use `eachOf`. - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - * @example - * - * // assuming openFiles is an array of file names and saveFile is a function - * // to save the modified contents of that file: - * - * async.each(openFiles, saveFile, function(err){ - * // if any of the saves produced an error, err would equal that error - * }); - * - * // assuming openFiles is an array of file names - * async.each(openFiles, function(file, callback) { - * - * // Perform operation on file here. - * console.log('Processing file ' + file); - * - * if( file.length > 32 ) { - * console.log('This file name is too long'); - * callback('File name too long'); - * } else { - * // Do work to process file here - * console.log('File processed'); - * callback(); - * } - * }, function(err) { - * // if any of the file processing produced an error, err would equal that error - * if( err ) { - * // One of the iterations produced an error. - * // All processing will now stop. - * console.log('A file failed to process'); - * } else { - * console.log('All files have been processed successfully'); - * } - * }); - */ - function eachLimit(coll, iteratee, callback) { - return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); - } +/***/ }), - var each = awaitify(eachLimit, 3); +/***/ 98178: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /** - * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. - * - * @name eachLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The array index is not passed to the iteratee. 
- * If you need the index, use `eachOfLimit`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - */ - function eachLimit$1(coll, limit, iteratee, callback) { - return eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); - } - var eachLimit$2 = awaitify(eachLimit$1, 4); +"use strict"; - /** - * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. - * - * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item - * in series and therefore the iteratee functions will complete in order. - * @name eachSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfSeries`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - */ - function eachSeries(coll, iteratee, callback) { - return eachLimit$2(coll, 1, iteratee, callback) - } - var eachSeries$1 = awaitify(eachSeries, 3); +var utils = __nccwpck_require__(20328); +var buildURL = __nccwpck_require__(30646); +var InterceptorManager = __nccwpck_require__(3214); +var dispatchRequest = __nccwpck_require__(85062); +var mergeConfig = __nccwpck_require__(74831); - /** - * Wrap an async function and ensure it calls its callback on a later tick of - * the event loop. If the function already calls its callback on a next tick, - * no extra deferral is added. This is useful for preventing stack overflows - * (`RangeError: Maximum call stack size exceeded`) and generally keeping - * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) - * contained. ES2017 `async` functions are returned as-is -- they are immune - * to Zalgo's corrupting influences, as they always resolve on a later tick. - * - * @name ensureAsync - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - an async function, one that expects a node-style - * callback as its last argument. - * @returns {AsyncFunction} Returns a wrapped function with the exact same call - * signature as the function passed in. - * @example - * - * function sometimesAsync(arg, callback) { - * if (cache[arg]) { - * return callback(null, cache[arg]); // this would be synchronous!! 
- * } else { - * doSomeIO(arg, callback); // this IO would be asynchronous - * } - * } - * - * // this has a risk of stack overflows if many results are cached in a row - * async.mapSeries(args, sometimesAsync, done); - * - * // this will defer sometimesAsync's callback if necessary, - * // preventing stack overflows - * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); - */ - function ensureAsync(fn) { - if (isAsync(fn)) return fn; - return function (...args/*, callback*/) { - var callback = args.pop(); - var sync = true; - args.push((...innerArgs) => { - if (sync) { - setImmediate$1(() => callback(...innerArgs)); - } else { - callback(...innerArgs); - } - }); - fn.apply(this, args); - sync = false; - }; - } +/** + * Create a new instance of Axios + * + * @param {Object} instanceConfig The default config for the instance + */ +function Axios(instanceConfig) { + this.defaults = instanceConfig; + this.interceptors = { + request: new InterceptorManager(), + response: new InterceptorManager() + }; +} - /** - * Returns `true` if every element in `coll` satisfies an async test. If any - * iteratee call returns `false`, the main `callback` is immediately called. - * - * @name every - * @static - * @memberOf module:Collections - * @method - * @alias all - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - * @example - * - * async.every(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then every file exists - * }); - */ - function every(coll, iteratee, callback) { - return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) - } - var every$1 = awaitify(every, 3); +/** + * Dispatch a request + * + * @param {Object} config The config specific for this request (merged with this.defaults) + */ +Axios.prototype.request = function request(config) { + /*eslint no-param-reassign:0*/ + // Allow for axios('example/url'[, config]) a la fetch API + if (typeof config === 'string') { + config = arguments[1] || {}; + config.url = arguments[0]; + } else { + config = config || {}; + } - /** - * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. - * - * @name everyLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. 
Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function everyLimit(coll, limit, iteratee, callback) { - return _createTester(bool => !bool, res => !res)(eachOfLimit(limit), coll, iteratee, callback) - } - var everyLimit$1 = awaitify(everyLimit, 4); + config = mergeConfig(this.defaults, config); - /** - * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. - * - * @name everySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in series. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function everySeries(coll, iteratee, callback) { - return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) - } - var everySeries$1 = awaitify(everySeries, 3); + // Set config.method + if (config.method) { + config.method = config.method.toLowerCase(); + } else if (this.defaults.method) { + config.method = this.defaults.method.toLowerCase(); + } else { + config.method = 'get'; + } - function filterArray(eachfn, arr, iteratee, callback) { - var truthValues = new Array(arr.length); - eachfn(arr, (x, index, iterCb) => { - iteratee(x, (err, v) => { - truthValues[index] = !!v; - iterCb(err); - }); - }, err => { - if (err) return callback(err); - var results = []; - for (var i = 0; i < arr.length; i++) { - if (truthValues[i]) results.push(arr[i]); - } - callback(null, results); - }); - } + // Hook up interceptors middleware + var chain = [dispatchRequest, undefined]; + var promise = Promise.resolve(config); - function filterGeneric(eachfn, coll, iteratee, callback) { - var results = []; - eachfn(coll, (x, index, iterCb) => { - iteratee(x, (err, v) => { - if (err) return iterCb(err); - if (v) { - results.push({index, value: x}); - } - iterCb(err); - }); - }, err => { - if (err) return callback(err); - callback(null, results - .sort((a, b) => a.index - b.index) - .map(v => v.value)); - }); - } + this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) { + chain.unshift(interceptor.fulfilled, interceptor.rejected); + }); - function _filter(eachfn, coll, iteratee, callback) { - var filter = isArrayLike(coll) ? filterArray : filterGeneric; - return filter(eachfn, coll, wrapAsync(iteratee), callback); - } + this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) { + chain.push(interceptor.fulfilled, interceptor.rejected); + }); - /** - * Returns a new array of all the values in `coll` which pass an async truth - * test. This operation is performed in parallel, but the results array will be - * in the same order as the original. 
- * - * @name filter - * @static - * @memberOf module:Collections - * @method - * @alias select - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback provided - * @example - * - * async.filter(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, results) { - * // results now equals an array of the existing files - * }); - */ - function filter (coll, iteratee, callback) { - return _filter(eachOf$1, coll, iteratee, callback) - } - var filter$1 = awaitify(filter, 3); + while (chain.length) { + promise = promise.then(chain.shift(), chain.shift()); + } - /** - * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a - * time. - * - * @name filterLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback provided - */ - function filterLimit (coll, limit, iteratee, callback) { - return _filter(eachOfLimit(limit), coll, iteratee, callback) - } - var filterLimit$1 = awaitify(filterLimit, 4); + return promise; +}; - /** - * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. - * - * @name filterSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. 
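// Illustrative aside, not part of the bundle: Axios.prototype.request above accepts
// either a config object or a bare URL string (method defaults to 'get'), and the two
// utils.forEach blocks generate the familiar verb aliases. Equivalent calls, assuming
// a hypothetical endpoint:
var axios = require('axios');
axios('https://example.test/items');                          // string form
axios({ url: 'https://example.test/items', method: 'get' });  // explicit config form
axios.post('https://example.test/items', { name: 'flat' }, {  // data-carrying alias
  headers: { 'Content-Type': 'application/json' }
});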
Invoked with (err, results) - * @returns {Promise} a promise, if no callback provided - */ - function filterSeries (coll, iteratee, callback) { - return _filter(eachOfSeries$1, coll, iteratee, callback) - } - var filterSeries$1 = awaitify(filterSeries, 3); +Axios.prototype.getUri = function getUri(config) { + config = mergeConfig(this.defaults, config); + return buildURL(config.url, config.params, config.paramsSerializer).replace(/^\?/, ''); +}; - /** - * Calls the asynchronous function `fn` with a callback parameter that allows it - * to call itself again, in series, indefinitely. +// Provide aliases for supported request methods +utils.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) { + /*eslint func-names:0*/ + Axios.prototype[method] = function(url, config) { + return this.request(mergeConfig(config || {}, { + method: method, + url: url, + data: (config || {}).data + })); + }; +}); - * If an error is passed to the callback then `errback` is called with the - * error, and execution stops, otherwise it will never be called. - * - * @name forever - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} fn - an async function to call repeatedly. - * Invoked with (next). - * @param {Function} [errback] - when `fn` passes an error to it's callback, - * this function will be called, and execution stops. Invoked with (err). - * @returns {Promise} a promise that rejects if an error occurs and an errback - * is not passed - * @example - * - * async.forever( - * function(next) { - * // next is suitable for passing to things that need a callback(err [, whatever]); - * // it will result in this function being called again. - * }, - * function(err) { - * // if next is called with a value in its first parameter, it will appear - * // in here as 'err', and execution will stop. - * } - * ); - */ - function forever(fn, errback) { - var done = onlyOnce(errback); - var task = wrapAsync(ensureAsync(fn)); +utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { + /*eslint func-names:0*/ + Axios.prototype[method] = function(url, data, config) { + return this.request(mergeConfig(config || {}, { + method: method, + url: url, + data: data + })); + }; +}); - function next(err) { - if (err) return done(err); - if (err === false) return; - task(next); - } - return next(); - } - var forever$1 = awaitify(forever, 2); +module.exports = Axios; - /** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. - * - * @name groupByLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.groupBy]{@link module:Collections.groupBy} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. 
- * @returns {Promise} a promise, if no callback is passed - */ - function groupByLimit(coll, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return mapLimit$1(coll, limit, (val, iterCb) => { - _iteratee(val, (err, key) => { - if (err) return iterCb(err); - return iterCb(err, {key, val}); - }); - }, (err, mapResults) => { - var result = {}; - // from MDN, handle object having an `hasOwnProperty` prop - var {hasOwnProperty} = Object.prototype; - for (var i = 0; i < mapResults.length; i++) { - if (mapResults[i]) { - var {key} = mapResults[i]; - var {val} = mapResults[i]; +/***/ }), - if (hasOwnProperty.call(result, key)) { - result[key].push(val); - } else { - result[key] = [val]; - } - } - } +/***/ 3214: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return callback(err, result); - }); - } +"use strict"; - var groupByLimit$1 = awaitify(groupByLimit, 4); - /** - * Returns a new object, where each value corresponds to an array of items, from - * `coll`, that returned the corresponding key. That is, the keys of the object - * correspond to the values passed to the `iteratee` callback. - * - * Note: Since this function applies the `iteratee` to each item in parallel, - * there is no guarantee that the `iteratee` functions will complete in order. - * However, the values for each key in the `result` will be in the same order as - * the original `coll`. For Objects, the values will roughly be in the order of - * the original Objects' keys (but this can vary across JavaScript engines). - * - * @name groupBy - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - * @returns {Promise} a promise, if no callback is passed - * @example - * - * async.groupBy(['userId1', 'userId2', 'userId3'], function(userId, callback) { - * db.findById(userId, function(err, user) { - * if (err) return callback(err); - * return callback(null, user.age); - * }); - * }, function(err, result) { - * // result is object containing the userIds grouped by age - * // e.g. 
{ 30: ['userId1', 'userId3'], 42: ['userId2']}; - * }); - */ - function groupBy (coll, iteratee, callback) { - return groupByLimit$1(coll, Infinity, iteratee, callback) +var utils = __nccwpck_require__(20328); + +function InterceptorManager() { + this.handlers = []; +} + +/** + * Add a new interceptor to the stack + * + * @param {Function} fulfilled The function to handle `then` for a `Promise` + * @param {Function} rejected The function to handle `reject` for a `Promise` + * + * @return {Number} An ID used to remove interceptor later + */ +InterceptorManager.prototype.use = function use(fulfilled, rejected) { + this.handlers.push({ + fulfilled: fulfilled, + rejected: rejected + }); + return this.handlers.length - 1; +}; + +/** + * Remove an interceptor from the stack + * + * @param {Number} id The ID that was returned by `use` + */ +InterceptorManager.prototype.eject = function eject(id) { + if (this.handlers[id]) { + this.handlers[id] = null; + } +}; + +/** + * Iterate over all the registered interceptors + * + * This method is particularly useful for skipping over any + * interceptors that may have become `null` calling `eject`. + * + * @param {Function} fn The function to call for each interceptor + */ +InterceptorManager.prototype.forEach = function forEach(fn) { + utils.forEach(this.handlers, function forEachHandler(h) { + if (h !== null) { + fn(h); } + }); +}; - /** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. - * - * @name groupBySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.groupBy]{@link module:Collections.groupBy} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - * @returns {Promise} a promise, if no callback is passed - */ - function groupBySeries (coll, iteratee, callback) { - return groupByLimit$1(coll, 1, iteratee, callback) +module.exports = InterceptorManager; + + +/***/ }), + +/***/ 41934: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var isAbsoluteURL = __nccwpck_require__(41301); +var combineURLs = __nccwpck_require__(57189); + +/** + * Creates a new URL by combining the baseURL with the requestedURL, + * only when the requestedURL is not already an absolute URL. + * If the requestURL is absolute, this function returns the requestedURL untouched. + * + * @param {string} baseURL The base URL + * @param {string} requestedURL Absolute or relative URL to combine + * @returns {string} The combined full path + */ +module.exports = function buildFullPath(baseURL, requestedURL) { + if (baseURL && !isAbsoluteURL(requestedURL)) { + return combineURLs(baseURL, requestedURL); + } + return requestedURL; +}; + + +/***/ }), + +/***/ 15226: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var enhanceError = __nccwpck_require__(21516); + +/** + * Create an Error with the specified message, config, error code, request and response. + * + * @param {string} message The error message. 
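// Illustrative aside, not part of the bundle: the InterceptorManager above backs
// `axios.interceptors.request` and `.response` (wired up in the Axios constructor
// shown earlier). `use` returns a numeric id that `eject` accepts later. A sketch
// with a hypothetical header:
var axios = require('axios');
var id = axios.interceptors.request.use(function (config) {
  config.headers['X-Trace'] = 'demo'; // hypothetical header; survives the later flatten
  return config;                      // interceptors must return the config
}, function (error) {
  return Promise.reject(error);
});
axios.interceptors.request.eject(id); // remove it again when no longer needed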
+ * @param {Object} config The config. + * @param {string} [code] The error code (for example, 'ECONNABORTED'). + * @param {Object} [request] The request. + * @param {Object} [response] The response. + * @returns {Error} The created error. + */ +module.exports = function createError(message, config, code, request, response) { + var error = new Error(message); + return enhanceError(error, config, code, request, response); +}; + + +/***/ }), + +/***/ 85062: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var utils = __nccwpck_require__(20328); +var transformData = __nccwpck_require__(19812); +var isCancel = __nccwpck_require__(64057); +var defaults = __nccwpck_require__(98190); + +/** + * Throws a `Cancel` if cancellation has been requested. + */ +function throwIfCancellationRequested(config) { + if (config.cancelToken) { + config.cancelToken.throwIfRequested(); + } +} + +/** + * Dispatch a request to the server using the configured adapter. + * + * @param {object} config The config that is to be used for the request + * @returns {Promise} The Promise to be fulfilled + */ +module.exports = function dispatchRequest(config) { + throwIfCancellationRequested(config); + + // Ensure headers exist + config.headers = config.headers || {}; + + // Transform request data + config.data = transformData( + config.data, + config.headers, + config.transformRequest + ); + + // Flatten headers + config.headers = utils.merge( + config.headers.common || {}, + config.headers[config.method] || {}, + config.headers + ); + + utils.forEach( + ['delete', 'get', 'head', 'post', 'put', 'patch', 'common'], + function cleanHeaderConfig(method) { + delete config.headers[method]; } + ); - /** - * Logs the result of an `async` function to the `console`. Only works in - * Node.js or in browsers that support `console.log` and `console.error` (such - * as FF and Chrome). If multiple arguments are returned from the async - * function, `console.log` is called on each argument in order. - * - * @name log - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} function - The function you want to eventually apply - * all arguments to. - * @param {...*} arguments... - Any number of arguments to apply to the function. - * @example - * - * // in a module - * var hello = function(name, callback) { - * setTimeout(function() { - * callback(null, 'hello ' + name); - * }, 1000); - * }; - * - * // in the node repl - * node> async.log(hello, 'world'); - * 'hello world' - */ - var log = consoleFunc('log'); + var adapter = config.adapter || defaults.adapter; - /** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a - * time. - * - * @name mapValuesLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.mapValues]{@link module:Collections.mapValues} - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. 
- * Invoked with (err, result). - * @returns {Promise} a promise, if no callback is passed - */ - function mapValuesLimit(obj, limit, iteratee, callback) { - callback = once(callback); - var newObj = {}; - var _iteratee = wrapAsync(iteratee); - return eachOfLimit(limit)(obj, (val, key, next) => { - _iteratee(val, key, (err, result) => { - if (err) return next(err); - newObj[key] = result; - next(err); - }); - }, err => callback(err, newObj)); + return adapter(config).then(function onAdapterResolution(response) { + throwIfCancellationRequested(config); + + // Transform response data + response.data = transformData( + response.data, + response.headers, + config.transformResponse + ); + + return response; + }, function onAdapterRejection(reason) { + if (!isCancel(reason)) { + throwIfCancellationRequested(config); + + // Transform response data + if (reason && reason.response) { + reason.response.data = transformData( + reason.response.data, + reason.response.headers, + config.transformResponse + ); + } } - var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); + return Promise.reject(reason); + }); +}; - /** - * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. - * - * Produces a new Object by mapping each value of `obj` through the `iteratee` - * function. The `iteratee` is called each `value` and `key` from `obj` and a - * callback for when it has finished processing. Each of these callbacks takes - * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` - * passes an error to its callback, the main `callback` (for the `mapValues` - * function) is immediately called with the error. - * - * Note, the order of the keys in the result is not guaranteed. The keys will - * be roughly in the order they complete, (but this is very engine-specific) - * - * @name mapValues - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - * @returns {Promise} a promise, if no callback is passed - * @example - * - * async.mapValues({ - * f1: 'file1', - * f2: 'file2', - * f3: 'file3' - * }, function (file, key, callback) { - * fs.stat(file, callback); - * }, function(err, result) { - * // result is now a map of stats for each file, e.g. - * // { - * // f1: [stats for file1], - * // f2: [stats for file2], - * // f3: [stats for file3] - * // } - * }); - */ - function mapValues(obj, iteratee, callback) { - return mapValuesLimit$1(obj, Infinity, iteratee, callback) + +/***/ }), + +/***/ 21516: +/***/ ((module) => { + +"use strict"; + + +/** + * Update an Error with the specified config, error code, and response. + * + * @param {Error} error The error to update. + * @param {Object} config The config. + * @param {string} [code] The error code (for example, 'ECONNABORTED'). + * @param {Object} [request] The request. + * @param {Object} [response] The response. + * @returns {Error} The error. 
+ */ +module.exports = function enhanceError(error, config, code, request, response) { + error.config = config; + if (code) { + error.code = code; + } + + error.request = request; + error.response = response; + error.isAxiosError = true; + + error.toJSON = function toJSON() { + return { + // Standard + message: this.message, + name: this.name, + // Microsoft + description: this.description, + number: this.number, + // Mozilla + fileName: this.fileName, + lineNumber: this.lineNumber, + columnNumber: this.columnNumber, + stack: this.stack, + // Axios + config: this.config, + code: this.code + }; + }; + return error; +}; + + +/***/ }), + +/***/ 74831: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var utils = __nccwpck_require__(20328); + +/** + * Config-specific merge-function which creates a new config-object + * by merging two configuration objects together. + * + * @param {Object} config1 + * @param {Object} config2 + * @returns {Object} New object resulting from merging config2 to config1 + */ +module.exports = function mergeConfig(config1, config2) { + // eslint-disable-next-line no-param-reassign + config2 = config2 || {}; + var config = {}; + + var valueFromConfig2Keys = ['url', 'method', 'data']; + var mergeDeepPropertiesKeys = ['headers', 'auth', 'proxy', 'params']; + var defaultToConfig2Keys = [ + 'baseURL', 'transformRequest', 'transformResponse', 'paramsSerializer', + 'timeout', 'timeoutMessage', 'withCredentials', 'adapter', 'responseType', 'xsrfCookieName', + 'xsrfHeaderName', 'onUploadProgress', 'onDownloadProgress', 'decompress', + 'maxContentLength', 'maxBodyLength', 'maxRedirects', 'transport', 'httpAgent', + 'httpsAgent', 'cancelToken', 'socketPath', 'responseEncoding' + ]; + var directMergeKeys = ['validateStatus']; + + function getMergedValue(target, source) { + if (utils.isPlainObject(target) && utils.isPlainObject(source)) { + return utils.merge(target, source); + } else if (utils.isPlainObject(source)) { + return utils.merge({}, source); + } else if (utils.isArray(source)) { + return source.slice(); } + return source; + } - /** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. - * - * @name mapValuesSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.mapValues]{@link module:Collections.mapValues} - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - * @returns {Promise} a promise, if no callback is passed - */ - function mapValuesSeries(obj, iteratee, callback) { - return mapValuesLimit$1(obj, 1, iteratee, callback) + function mergeDeepProperties(prop) { + if (!utils.isUndefined(config2[prop])) { + config[prop] = getMergedValue(config1[prop], config2[prop]); + } else if (!utils.isUndefined(config1[prop])) { + config[prop] = getMergedValue(undefined, config1[prop]); } + } - /** - * Caches the results of an async function. 
When creating a hash to store - * function results against, the callback is omitted from the hash and an - * optional hash function can be used. - * - * **Note: if the async function errs, the result will not be cached and - * subsequent calls will call the wrapped function.** - * - * If no hash function is specified, the first argument is used as a hash key, - * which may work reasonably if it is a string or a data type that converts to a - * distinct string. Note that objects and arrays will not behave reasonably. - * Neither will cases where the other arguments are significant. In such cases, - * specify your own hash function. - * - * The cache of results is exposed as the `memo` property of the function - * returned by `memoize`. - * - * @name memoize - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - The async function to proxy and cache results from. - * @param {Function} hasher - An optional function for generating a custom hash - * for storing results. It has all the arguments applied to it apart from the - * callback, and must be synchronous. - * @returns {AsyncFunction} a memoized version of `fn` - * @example - * - * var slow_fn = function(name, callback) { - * // do something - * callback(null, result); - * }; - * var fn = async.memoize(slow_fn); - * - * // fn can now be used as if it were slow_fn - * fn('some name', function() { - * // callback - * }); - */ - function memoize(fn, hasher = v => v) { - var memo = Object.create(null); - var queues = Object.create(null); - var _fn = wrapAsync(fn); - var memoized = initialParams((args, callback) => { - var key = hasher(...args); - if (key in memo) { - setImmediate$1(() => callback(null, ...memo[key])); - } else if (key in queues) { - queues[key].push(callback); - } else { - queues[key] = [callback]; - _fn(...args, (err, ...resultArgs) => { - // #1465 don't memoize if an error occurred - if (!err) { - memo[key] = resultArgs; - } - var q = queues[key]; - delete queues[key]; - for (var i = 0, l = q.length; i < l; i++) { - q[i](err, ...resultArgs); - } - }); - } - }); - memoized.memo = memo; - memoized.unmemoized = fn; - return memoized; + utils.forEach(valueFromConfig2Keys, function valueFromConfig2(prop) { + if (!utils.isUndefined(config2[prop])) { + config[prop] = getMergedValue(undefined, config2[prop]); } + }); - /** - * Calls `callback` on a later loop around the event loop. In Node.js this just - * calls `process.nextTick`. In the browser it will use `setImmediate` if - * available, otherwise `setTimeout(callback, 0)`, which means other higher - * priority events may precede the execution of `callback`. - * - * This is used internally for browser-compatibility purposes. - * - * @name nextTick - * @static - * @memberOf module:Utils - * @method - * @see [async.setImmediate]{@link module:Utils.setImmediate} - * @category Util - * @param {Function} callback - The function to call on a later loop around - * the event loop. Invoked with (args...). - * @param {...*} args... - any number of additional arguments to pass to the - * callback on the next tick. 
- * @example - * - * var call_order = []; - * async.nextTick(function() { - * call_order.push('two'); - * // call_order now equals ['one','two'] - * }); - * call_order.push('one'); - * - * async.setImmediate(function (a, b, c) { - * // a, b, and c equal 1, 2, and 3 - * }, 1, 2, 3); - */ - var _defer$1; + utils.forEach(mergeDeepPropertiesKeys, mergeDeepProperties); - if (hasNextTick) { - _defer$1 = process.nextTick; - } else if (hasSetImmediate) { - _defer$1 = setImmediate; - } else { - _defer$1 = fallback; + utils.forEach(defaultToConfig2Keys, function defaultToConfig2(prop) { + if (!utils.isUndefined(config2[prop])) { + config[prop] = getMergedValue(undefined, config2[prop]); + } else if (!utils.isUndefined(config1[prop])) { + config[prop] = getMergedValue(undefined, config1[prop]); } + }); - var nextTick = wrap(_defer$1); + utils.forEach(directMergeKeys, function merge(prop) { + if (prop in config2) { + config[prop] = getMergedValue(config1[prop], config2[prop]); + } else if (prop in config1) { + config[prop] = getMergedValue(undefined, config1[prop]); + } + }); - var parallel = awaitify((eachfn, tasks, callback) => { - var results = isArrayLike(tasks) ? [] : {}; + var axiosKeys = valueFromConfig2Keys + .concat(mergeDeepPropertiesKeys) + .concat(defaultToConfig2Keys) + .concat(directMergeKeys); - eachfn(tasks, (task, key, taskCb) => { - wrapAsync(task)((err, ...result) => { - if (result.length < 2) { - [result] = result; - } - results[key] = result; - taskCb(err); - }); - }, err => callback(err, results)); - }, 3); + var otherKeys = Object + .keys(config1) + .concat(Object.keys(config2)) + .filter(function filterAxiosKeys(key) { + return axiosKeys.indexOf(key) === -1; + }); - /** - * Run the `tasks` collection of functions in parallel, without waiting until - * the previous function has completed. If any of the functions pass an error to - * its callback, the main `callback` is immediately called with the value of the - * error. Once the `tasks` have completed, the results are passed to the final - * `callback` as an array. - * - * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about - * parallel execution of code. If your tasks do not use any timers or perform - * any I/O, they will actually be executed in series. Any synchronous setup - * sections for each task will happen one after the other. JavaScript remains - * single-threaded. - * - * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the - * execution of other tasks when a task fails. - * - * It is also possible to use an object instead of an array. Each property will - * be run as a function and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.parallel}. - * - * @name parallel - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of - * [async functions]{@link AsyncFunction} to run. - * Each async function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed successfully. This function gets a results array - * (or object) containing all the result arguments passed to the task callbacks. - * Invoked with (err, results). 
- * @returns {Promise} a promise, if a callback is not passed - * - * @example - * async.parallel([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ], - * // optional callback - * function(err, results) { - * // the results array will equal ['one','two'] even though - * // the second function had a shorter timeout. - * }); - * - * // an example using an object instead of an array - * async.parallel({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback) { - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * // results is now equals to: {one: 1, two: 2} - * }); - */ - function parallel$1(tasks, callback) { - return parallel(eachOf$1, tasks, callback); - } + utils.forEach(otherKeys, mergeDeepProperties); - /** - * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a - * time. - * - * @name parallelLimit - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.parallel]{@link module:ControlFlow.parallel} - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of - * [async functions]{@link AsyncFunction} to run. - * Each async function can complete with any number of optional `result` values. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed successfully. This function gets a results array - * (or object) containing all the result arguments passed to the task callbacks. - * Invoked with (err, results). - * @returns {Promise} a promise, if a callback is not passed - */ - function parallelLimit(tasks, limit, callback) { - return parallel(eachOfLimit(limit), tasks, callback); - } + return config; +}; - /** - * A queue of tasks for the worker function to complete. - * @typedef {Iterable} QueueObject - * @memberOf module:ControlFlow - * @property {Function} length - a function returning the number of items - * waiting to be processed. Invoke with `queue.length()`. - * @property {boolean} started - a boolean indicating whether or not any - * items have been pushed and processed by the queue. - * @property {Function} running - a function returning the number of items - * currently being processed. Invoke with `queue.running()`. - * @property {Function} workersList - a function returning the array of items - * currently being processed. Invoke with `queue.workersList()`. - * @property {Function} idle - a function returning false if there are items - * waiting or being processed, or true if not. Invoke with `queue.idle()`. - * @property {number} concurrency - an integer for determining how many `worker` - * functions should be run in parallel. This property can be changed after a - * `queue` is created to alter the concurrency on-the-fly. - * @property {number} payload - an integer that specifies how many items are - * passed to the worker function at a time. only applies if this is a - * [cargo]{@link module:ControlFlow.cargo} object - * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` - * once the `worker` has finished processing the task. Instead of a single task, - * a `tasks` array can be submitted. 
The respective callback is used for every - * task in the list. Invoke with `queue.push(task, [callback])`, - * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. - * Invoke with `queue.unshift(task, [callback])`. - * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns - * a promise that rejects if an error occurs. - * @property {AsyncFunction} unshirtAsync - the same as `q.unshift`, except this returns - * a promise that rejects if an error occurs. - * @property {Function} remove - remove items from the queue that match a test - * function. The test function will be passed an object with a `data` property, - * and a `priority` property, if this is a - * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. - * Invoked with `queue.remove(testFn)`, where `testFn` is of the form - * `function ({data, priority}) {}` and returns a Boolean. - * @property {Function} saturated - a function that sets a callback that is - * called when the number of running workers hits the `concurrency` limit, and - * further tasks will be queued. If the callback is omitted, `q.saturated()` - * returns a promise for the next occurrence. - * @property {Function} unsaturated - a function that sets a callback that is - * called when the number of running workers is less than the `concurrency` & - * `buffer` limits, and further tasks will not be queued. If the callback is - * omitted, `q.unsaturated()` returns a promise for the next occurrence. - * @property {number} buffer - A minimum threshold buffer in order to say that - * the `queue` is `unsaturated`. - * @property {Function} empty - a function that sets a callback that is called - * when the last item from the `queue` is given to a `worker`. If the callback - * is omitted, `q.empty()` returns a promise for the next occurrence. - * @property {Function} drain - a function that sets a callback that is called - * when the last item from the `queue` has returned from the `worker`. If the - * callback is omitted, `q.drain()` returns a promise for the next occurrence. - * @property {Function} error - a function that sets a callback that is called - * when a task errors. Has the signature `function(error, task)`. If the - * callback is omitted, `error()` returns a promise that rejects on the next - * error. - * @property {boolean} paused - a boolean for determining whether the queue is - * in a paused state. - * @property {Function} pause - a function that pauses the processing of tasks - * until `resume()` is called. Invoke with `queue.pause()`. - * @property {Function} resume - a function that resumes the processing of - * queued tasks when the queue is paused. Invoke with `queue.resume()`. - * @property {Function} kill - a function that removes the `drain` callback and - * empties remaining tasks from the queue forcing it to go idle. No more tasks - * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. - * - * @example - * const q = aync.queue(worker, 2) - * q.push(item1) - * q.push(item2) - * q.push(item3) - * // queues are iterable, spread into an array to inspect - * const items = [...q] // [item1, item2, item3] - * // or use for of - * for (let item of q) { - * console.log(item) - * } - * - * q.drain(() => { - * console.log('all done') - * }) - * // or - * await q.drain() - */ - /** - * Creates a `queue` object with the specified `concurrency`. Tasks added to the - * `queue` are processed in parallel (up to the `concurrency` limit). 
If all - * `worker`s are in progress, the task is queued until one becomes available. - * Once a `worker` completes a `task`, that `task`'s callback is called. - * - * @name queue - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. Invoked with (task, callback). - * @param {number} [concurrency=1] - An `integer` for determining how many - * `worker` functions should be run in parallel. If omitted, the concurrency - * defaults to `1`. If the concurrency is `0`, an error is thrown. - * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be - * attached as certain properties to listen for specific events during the - * lifecycle of the queue. - * @example - * - * // create a queue object with concurrency 2 - * var q = async.queue(function(task, callback) { - * console.log('hello ' + task.name); - * callback(); - * }, 2); - * - * // assign a callback - * q.drain(function() { - * console.log('all items have been processed'); - * }); - * // or await the end - * await q.drain() - * - * // assign an error callback - * q.error(function(err, task) { - * console.error('task experienced an error'); - * }); - * - * // add some items to the queue - * q.push({name: 'foo'}, function(err) { - * console.log('finished processing foo'); - * }); - * // callback is optional - * q.push({name: 'bar'}); - * - * // add some items to the queue (batch-wise) - * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { - * console.log('finished processing item'); - * }); - * - * // add some items to the front of the queue - * q.unshift({name: 'bar'}, function (err) { - * console.log('finished processing bar'); - * }); - */ - function queue$1 (worker, concurrency) { - var _worker = wrapAsync(worker); - return queue((items, cb) => { - _worker(items[0], cb); - }, concurrency, 1); - } +/***/ }), - // Binary min-heap implementation used for priority queue. - // Implementation is stable, i.e. push time is considered for equal priorities - class Heap { - constructor() { - this.heap = []; - this.pushCount = Number.MIN_SAFE_INTEGER; - } +/***/ 13211: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - get length() { - return this.heap.length; - } +"use strict"; - empty () { - this.heap = []; - return this; - } - percUp(index) { - let p; +var createError = __nccwpck_require__(15226); - while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { - let t = this.heap[index]; - this.heap[index] = this.heap[p]; - this.heap[p] = t; +/** + * Resolve or reject a Promise based on response status. + * + * @param {Function} resolve A function that resolves the promise. + * @param {Function} reject A function that rejects the promise. + * @param {object} response The response. 
+ */ +module.exports = function settle(resolve, reject, response) { + var validateStatus = response.config.validateStatus; + if (!response.status || !validateStatus || validateStatus(response.status)) { + resolve(response); + } else { + reject(createError( + 'Request failed with status code ' + response.status, + response.config, + null, + response.request, + response + )); + } +}; - index = p; - } - } - percDown(index) { - let l; +/***/ }), - while ((l=leftChi(index)) < this.heap.length) { - if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { - l = l+1; - } +/***/ 19812: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (smaller(this.heap[index], this.heap[l])) { - break; - } +"use strict"; - let t = this.heap[index]; - this.heap[index] = this.heap[l]; - this.heap[l] = t; - index = l; - } - } +var utils = __nccwpck_require__(20328); - push(node) { - node.pushCount = ++this.pushCount; - this.heap.push(node); - this.percUp(this.heap.length-1); - } +/** + * Transform the data for a request or a response + * + * @param {Object|String} data The data to be transformed + * @param {Array} headers The headers for the request or response + * @param {Array|Function} fns A single function or Array of functions + * @returns {*} The resulting transformed data + */ +module.exports = function transformData(data, headers, fns) { + /*eslint no-param-reassign:0*/ + utils.forEach(fns, function transform(fn) { + data = fn(data, headers); + }); - unshift(node) { - return this.heap.push(node); - } + return data; +}; - shift() { - let [top] = this.heap; - this.heap[0] = this.heap[this.heap.length-1]; - this.heap.pop(); - this.percDown(0); +/***/ }), - return top; - } +/***/ 98190: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - toArray() { - return [...this]; - } +"use strict"; - *[Symbol.iterator] () { - for (let i = 0; i < this.heap.length; i++) { - yield this.heap[i].data; - } - } - remove (testFn) { - let j = 0; - for (let i = 0; i < this.heap.length; i++) { - if (!testFn(this.heap[i])) { - this.heap[j] = this.heap[i]; - j++; - } - } +var utils = __nccwpck_require__(20328); +var normalizeHeaderName = __nccwpck_require__(36240); - this.heap.splice(j); +var DEFAULT_CONTENT_TYPE = { + 'Content-Type': 'application/x-www-form-urlencoded' +}; - for (let i = parent(this.heap.length-1); i >= 0; i--) { - this.percDown(i); - } +function setContentTypeIfUnset(headers, value) { + if (!utils.isUndefined(headers) && utils.isUndefined(headers['Content-Type'])) { + headers['Content-Type'] = value; + } +} - return this; - } - } +function getDefaultAdapter() { + var adapter; + if (typeof XMLHttpRequest !== 'undefined') { + // For browsers use XHR adapter + adapter = __nccwpck_require__(3454); + } else if (typeof process !== 'undefined' && Object.prototype.toString.call(process) === '[object process]') { + // For node use HTTP adapter + adapter = __nccwpck_require__(68104); + } + return adapter; +} - function leftChi(i) { - return (i<<1)+1; - } +var defaults = { + adapter: getDefaultAdapter(), - function parent(i) { - return ((i+1)>>1)-1; + transformRequest: [function transformRequest(data, headers) { + normalizeHeaderName(headers, 'Accept'); + normalizeHeaderName(headers, 'Content-Type'); + if (utils.isFormData(data) || + utils.isArrayBuffer(data) || + utils.isBuffer(data) || + utils.isStream(data) || + utils.isFile(data) || + utils.isBlob(data) + ) { + return data; + } + if (utils.isArrayBufferView(data)) { + return data.buffer; + } + if 
(utils.isURLSearchParams(data)) { + setContentTypeIfUnset(headers, 'application/x-www-form-urlencoded;charset=utf-8'); + return data.toString(); + } + if (utils.isObject(data)) { + setContentTypeIfUnset(headers, 'application/json;charset=utf-8'); + return JSON.stringify(data); } + return data; + }], - function smaller(x, y) { - if (x.priority !== y.priority) { - return x.priority < y.priority; - } - else { - return x.pushCount < y.pushCount; - } + transformResponse: [function transformResponse(data) { + /*eslint no-param-reassign:0*/ + if (typeof data === 'string') { + try { + data = JSON.parse(data); + } catch (e) { /* Ignore */ } } + return data; + }], - /** - * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and - * completed in ascending priority order. - * - * @name priorityQueue - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. - * Invoked with (task, callback). - * @param {number} concurrency - An `integer` for determining how many `worker` - * functions should be run in parallel. If omitted, the concurrency defaults to - * `1`. If the concurrency is `0`, an error is thrown. - * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are two - * differences between `queue` and `priorityQueue` objects: - * * `push(task, priority, [callback])` - `priority` should be a number. If an - * array of `tasks` is given, all tasks will be assigned the same priority. - * * The `unshift` method was removed. - */ - function priorityQueue(worker, concurrency) { - // Start with a normal queue - var q = queue$1(worker, concurrency); + /** + * A timeout in milliseconds to abort a request. If set to 0 (default) a + * timeout is not created. + */ + timeout: 0, - q._tasks = new Heap(); + xsrfCookieName: 'XSRF-TOKEN', + xsrfHeaderName: 'X-XSRF-TOKEN', - // Override push to accept second parameter representing priority - q.push = function(data, priority = 0, callback = () => {}) { - if (typeof callback !== 'function') { - throw new Error('task callback must be a function'); - } - q.started = true; - if (!Array.isArray(data)) { - data = [data]; - } - if (data.length === 0 && q.idle()) { - // call drain immediately if there are no tasks - return setImmediate$1(() => q.drain()); - } + maxContentLength: -1, + maxBodyLength: -1, - for (var i = 0, l = data.length; i < l; i++) { - var item = { - data: data[i], - priority, - callback - }; + validateStatus: function validateStatus(status) { + return status >= 200 && status < 300; + } +}; - q._tasks.push(item); - } +defaults.headers = { + common: { + 'Accept': 'application/json, text/plain, */*' + } +}; - setImmediate$1(q.process); - }; +utils.forEach(['delete', 'get', 'head'], function forEachMethodNoData(method) { + defaults.headers[method] = {}; +}); - // Remove unshift function - delete q.unshift; +utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { + defaults.headers[method] = utils.merge(DEFAULT_CONTENT_TYPE); +}); - return q; - } +module.exports = defaults; - /** - * Runs the `tasks` array of functions in parallel, without waiting until the - * previous function has completed. Once any of the `tasks` complete or pass an - * error to its callback, the main `callback` is immediately called. 
It's - * equivalent to `Promise.race()`. - * - * @name race - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} - * to run. Each function can complete with an optional `result` value. - * @param {Function} callback - A callback to run once any of the functions have - * completed. This function gets an error or result from the first function that - * completed. Invoked with (err, result). - * @returns undefined - * @example - * - * async.race([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ], - * // main callback - * function(err, result) { - * // the result will be equal to 'two' as it finishes earlier - * }); - */ - function race(tasks, callback) { - callback = once(callback); - if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); - if (!tasks.length) return callback(); - for (var i = 0, l = tasks.length; i < l; i++) { - wrapAsync(tasks[i])(callback); - } - } - var race$1 = awaitify(race, 2); +/***/ }), - /** - * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. - * - * @name reduceRight - * @static - * @memberOf module:Collections - * @method - * @see [async.reduce]{@link module:Collections.reduce} - * @alias foldr - * @category Collection - * @param {Array} array - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction. - * If the iteratee complete with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. Invoked with - * (err, result). - * @returns {Promise} a promise, if no callback is passed - */ - function reduceRight (array, memo, iteratee, callback) { - var reversed = [...array].reverse(); - return reduce$1(reversed, memo, iteratee, callback); - } +/***/ 77065: +/***/ ((module) => { - /** - * Wraps the async function in another function that always completes with a - * result object, even when it errors. - * - * The result object has either the property `error` or `value`. - * - * @name reflect - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - The async function you want to wrap - * @returns {Function} - A function that always passes null to it's callback as - * the error. The second argument to the callback will be an `object` with - * either an `error` or a `value` property. - * @example - * - * async.parallel([ - * async.reflect(function(callback) { - * // do some stuff ... - * callback(null, 'one'); - * }), - * async.reflect(function(callback) { - * // do some more stuff but error ... - * callback('bad stuff happened'); - * }), - * async.reflect(function(callback) { - * // do some more stuff ... 
- * callback(null, 'two'); - * }) - * ], - * // optional callback - * function(err, results) { - * // values - * // results[0].value = 'one' - * // results[1].error = 'bad stuff happened' - * // results[2].value = 'two' - * }); - */ - function reflect(fn) { - var _fn = wrapAsync(fn); - return initialParams(function reflectOn(args, reflectCallback) { - args.push((error, ...cbArgs) => { - let retVal = {}; - if (error) { - retVal.error = error; - } - if (cbArgs.length > 0){ - var value = cbArgs; - if (cbArgs.length <= 1) { - [value] = cbArgs; - } - retVal.value = value; - } - reflectCallback(null, retVal); - }); +"use strict"; - return _fn.apply(this, args); - }); - } - /** - * A helper function that wraps an array or an object of functions with `reflect`. - * - * @name reflectAll - * @static - * @memberOf module:Utils - * @method - * @see [async.reflect]{@link module:Utils.reflect} - * @category Util - * @param {Array|Object|Iterable} tasks - The collection of - * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. - * @returns {Array} Returns an array of async functions, each wrapped in - * `async.reflect` - * @example - * - * let tasks = [ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * // do some more stuff but error ... - * callback(new Error('bad stuff happened')); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ]; - * - * async.parallel(async.reflectAll(tasks), - * // optional callback - * function(err, results) { - * // values - * // results[0].value = 'one' - * // results[1].error = Error('bad stuff happened') - * // results[2].value = 'two' - * }); - * - * // an example using an object instead of an array - * let tasks = { - * one: function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * two: function(callback) { - * callback('two'); - * }, - * three: function(callback) { - * setTimeout(function() { - * callback(null, 'three'); - * }, 100); - * } - * }; - * - * async.parallel(async.reflectAll(tasks), - * // optional callback - * function(err, results) { - * // values - * // results.one.value = 'one' - * // results.two.error = 'two' - * // results.three.value = 'three' - * }); - */ - function reflectAll(tasks) { - var results; - if (Array.isArray(tasks)) { - results = tasks.map(reflect); - } else { - results = {}; - Object.keys(tasks).forEach(key => { - results[key] = reflect.call(this, tasks[key]); - }); - } - return results; +module.exports = function bind(fn, thisArg) { + return function wrap() { + var args = new Array(arguments.length); + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i]; } + return fn.apply(thisArg, args); + }; +}; - function reject(eachfn, arr, _iteratee, callback) { - const iteratee = wrapAsync(_iteratee); - return _filter(eachfn, arr, (value, cb) => { - iteratee(value, (err, v) => { - cb(err, !v); - }); - }, callback); - } - /** - * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. - * - * @name reject - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. 
- * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - * @example - * - * async.reject(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, results) { - * // results now equals an array of missing files - * createFiles(results); - * }); - */ - function reject$1 (coll, iteratee, callback) { - return reject(eachOf$1, coll, iteratee, callback) - } - var reject$2 = awaitify(reject$1, 3); +/***/ }), - /** - * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a - * time. - * - * @name rejectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - */ - function rejectLimit (coll, limit, iteratee, callback) { - return reject(eachOfLimit(limit), coll, iteratee, callback) - } - var rejectLimit$1 = awaitify(rejectLimit, 4); +/***/ 30646: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /** - * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. - * - * @name rejectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - */ - function rejectSeries (coll, iteratee, callback) { - return reject(eachOfSeries$1, coll, iteratee, callback) - } - var rejectSeries$1 = awaitify(rejectSeries, 3); +"use strict"; - function constant$1(value) { - return function () { - return value; - } - } - /** - * Attempts to get a successful response from `task` no more than `times` times - * before returning an error. If the task is successful, the `callback` will be - * passed the result of the successful task. If all attempts fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name retry - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @see [async.retryable]{@link module:ControlFlow.retryable} - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an - * object with `times` and `interval` or a number. - * * `times` - The number of attempts to make before giving up. The default - * is `5`. 
- * * `interval` - The time to wait between retries, in milliseconds. The - * default is `0`. The interval may also be specified as a function of the - * retry count (see example). - * * `errorFilter` - An optional synchronous function that is invoked on - * erroneous result. If it returns `true` the retry attempts will continue; - * if the function returns `false` the retry flow is aborted with the current - * attempt's error and result being returned to the final callback. - * Invoked with (err). - * * If `opts` is a number, the number specifies the number of times to retry, - * with the default interval of `0`. - * @param {AsyncFunction} task - An async function to retry. - * Invoked with (callback). - * @param {Function} [callback] - An optional callback which is called when the - * task has succeeded, or after the final failed attempt. It receives the `err` - * and `result` arguments of the last attempt at completing the `task`. Invoked - * with (err, results). - * @returns {Promise} a promise if no callback provided - * - * @example - * - * // The `retry` function can be used as a stand-alone control flow by passing - * // a callback, as shown below: - * - * // try calling apiMethod 3 times - * async.retry(3, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 3 times, waiting 200 ms between each retry - * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 10 times with exponential backoff - * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) - * async.retry({ - * times: 10, - * interval: function(retryCount) { - * return 50 * Math.pow(2, retryCount); - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod the default 5 times no delay between each retry - * async.retry(apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod only when error condition satisfies, all other - * // errors will abort the retry control flow and return to final callback - * async.retry({ - * errorFilter: function(err) { - * return err.message === 'Temporary error'; // only retry on a specific error - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // to retry individual methods that are not as reliable within other - * // control flow functions, use the `retryable` wrapper: - * async.auto({ - * users: api.getUsers.bind(api), - * payments: async.retryable(3, api.getPayments.bind(api)) - * }, function(err, results) { - * // do something with the results - * }); - * - */ - const DEFAULT_TIMES = 5; - const DEFAULT_INTERVAL = 0; +var utils = __nccwpck_require__(20328); - function retry(opts, task, callback) { - var options = { - times: DEFAULT_TIMES, - intervalFunc: constant$1(DEFAULT_INTERVAL) - }; +function encode(val) { + return encodeURIComponent(val). + replace(/%3A/gi, ':'). + replace(/%24/g, '$'). + replace(/%2C/gi, ','). + replace(/%20/g, '+'). + replace(/%5B/gi, '['). 
+ replace(/%5D/gi, ']'); +} - if (arguments.length < 3 && typeof opts === 'function') { - callback = task || promiseCallback(); - task = opts; - } else { - parseTimes(options, opts); - callback = callback || promiseCallback(); - } +/** + * Build a URL by appending params to the end + * + * @param {string} url The base of the url (e.g., http://www.google.com) + * @param {object} [params] The params to be appended + * @returns {string} The formatted url + */ +module.exports = function buildURL(url, params, paramsSerializer) { + /*eslint no-param-reassign:0*/ + if (!params) { + return url; + } - if (typeof task !== 'function') { - throw new Error("Invalid arguments for async.retry"); - } + var serializedParams; + if (paramsSerializer) { + serializedParams = paramsSerializer(params); + } else if (utils.isURLSearchParams(params)) { + serializedParams = params.toString(); + } else { + var parts = []; - var _task = wrapAsync(task); + utils.forEach(params, function serialize(val, key) { + if (val === null || typeof val === 'undefined') { + return; + } - var attempt = 1; - function retryAttempt() { - _task((err, ...args) => { - if (err === false) return - if (err && attempt++ < options.times && - (typeof options.errorFilter != 'function' || - options.errorFilter(err))) { - setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); - } else { - callback(err, ...args); - } - }); + if (utils.isArray(val)) { + key = key + '[]'; + } else { + val = [val]; + } + + utils.forEach(val, function parseValue(v) { + if (utils.isDate(v)) { + v = v.toISOString(); + } else if (utils.isObject(v)) { + v = JSON.stringify(v); } + parts.push(encode(key) + '=' + encode(v)); + }); + }); - retryAttempt(); - return callback[PROMISE_SYMBOL] + serializedParams = parts.join('&'); + } + + if (serializedParams) { + var hashmarkIndex = url.indexOf('#'); + if (hashmarkIndex !== -1) { + url = url.slice(0, hashmarkIndex); } - function parseTimes(acc, t) { - if (typeof t === 'object') { - acc.times = +t.times || DEFAULT_TIMES; + url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams; + } - acc.intervalFunc = typeof t.interval === 'function' ? - t.interval : - constant$1(+t.interval || DEFAULT_INTERVAL); + return url; +}; - acc.errorFilter = t.errorFilter; - } else if (typeof t === 'number' || typeof t === 'string') { - acc.times = +t || DEFAULT_TIMES; - } else { - throw new Error("Invalid arguments for async.retry"); - } - } - /** - * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method - * wraps a task and makes it retryable, rather than immediately calling it - * with retries. - * - * @name retryable - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.retry]{@link module:ControlFlow.retry} - * @category Control Flow - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional - * options, exactly the same as from `retry`, except for a `opts.arity` that - * is the arity of the `task` function, defaulting to `task.length` - * @param {AsyncFunction} task - the asynchronous function to wrap. - * This function will be passed any arguments passed to the returned wrapper. - * Invoked with (...args, callback). - * @returns {AsyncFunction} The wrapped function, which when invoked, will - * retry on an error, based on the parameters specified in `opts`. - * This function will accept the same parameters as `task`. 
- * @example - * - * async.auto({ - * dep1: async.retryable(3, getFromFlakyService), - * process: ["dep1", async.retryable(3, function (results, cb) { - * maybeProcessData(results.dep1, cb); - * })] - * }, callback); - */ - function retryable (opts, task) { - if (!task) { - task = opts; - opts = null; - } - let arity = (opts && opts.arity) || task.length; - if (isAsync(task)) { - arity += 1; - } - var _task = wrapAsync(task); - return initialParams((args, callback) => { - if (args.length < arity - 1 || callback == null) { - args.push(callback); - callback = promiseCallback(); - } - function taskFn(cb) { - _task(...args, cb); - } +/***/ }), - if (opts) retry(opts, taskFn, callback); - else retry(taskFn, callback); +/***/ 57189: +/***/ ((module) => { - return callback[PROMISE_SYMBOL] - }); - } +"use strict"; - /** - * Run the functions in the `tasks` collection in series, each one running once - * the previous function has completed. If any functions in the series pass an - * error to its callback, no more functions are run, and `callback` is - * immediately called with the value of the error. Otherwise, `callback` - * receives an array of results when `tasks` have completed. - * - * It is also possible to use an object instead of an array. Each property will - * be run as a function, and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.series}. - * - * **Note** that while many implementations preserve the order of object - * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) - * explicitly states that - * - * > The mechanics and order of enumerating the properties is not specified. - * - * So if you rely on the order in which your series of functions are executed, - * and want this to work on all platforms, consider using an array. - * - * @name series - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing - * [async functions]{@link AsyncFunction} to run in series. - * Each function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This function gets a results array (or object) - * containing all the result arguments passed to the `task` callbacks. Invoked - * with (err, result). - * @return {Promise} a promise, if no callback is passed - * @example - * async.series([ - * function(callback) { - * // do some stuff ... - * callback(null, 'one'); - * }, - * function(callback) { - * // do some more stuff ... - * callback(null, 'two'); - * } - * ], - * // optional callback - * function(err, results) { - * // results is now equal to ['one', 'two'] - * }); - * - * async.series({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback){ - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * // results is now equal to: {one: 1, two: 2} - * }); - */ - function series(tasks, callback) { - return parallel(eachOfSeries$1, tasks, callback); - } - /** - * Returns `true` if at least one element in the `coll` satisfies an async test. - * If any iteratee call returns `true`, the main `callback` is immediately - * called. 
- * - * @name some - * @static - * @memberOf module:Collections - * @method - * @alias any - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - * @example - * - * async.some(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then at least one of the files exists - * }); - */ - function some(coll, iteratee, callback) { - return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) - } - var some$1 = awaitify(some, 3); +/** + * Creates a new URL by combining the specified URLs + * + * @param {string} baseURL The base URL + * @param {string} relativeURL The relative URL + * @returns {string} The combined URL + */ +module.exports = function combineURLs(baseURL, relativeURL) { + return relativeURL + ? baseURL.replace(/\/+$/, '') + '/' + relativeURL.replace(/^\/+/, '') + : baseURL; +}; - /** - * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. - * - * @name someLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anyLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function someLimit(coll, limit, iteratee, callback) { - return _createTester(Boolean, res => res)(eachOfLimit(limit), coll, iteratee, callback) - } - var someLimit$1 = awaitify(someLimit, 4); - /** - * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. - * - * @name someSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anySeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in series. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. 
- * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function someSeries(coll, iteratee, callback) { - return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) - } - var someSeries$1 = awaitify(someSeries, 3); +/***/ }), - /** - * Sorts a list by the results of running each `coll` value through an async - * `iteratee`. - * - * @name sortBy - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a value to use as the sort criteria as - * its `result`. - * Invoked with (item, callback). - * @param {Function} callback - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is the items - * from the original `coll` sorted by the values returned by the `iteratee` - * calls. Invoked with (err, results). - * @returns {Promise} a promise, if no callback passed - * @example - * - * async.sortBy(['file1','file2','file3'], function(file, callback) { - * fs.stat(file, function(err, stats) { - * callback(err, stats.mtime); - * }); - * }, function(err, results) { - * // results is now the original array of files sorted by - * // modified date - * }); - * - * // By modifying the callback parameter the - * // sorting order can be influenced: - * - * // ascending order - * async.sortBy([1,9,3,5], function(x, callback) { - * callback(null, x); - * }, function(err,result) { - * // result callback - * }); - * - * // descending order - * async.sortBy([1,9,3,5], function(x, callback) { - * callback(null, x*-1); //<- x*-1 instead of x, turns the order around - * }, function(err,result) { - * // result callback - * }); - */ - function sortBy (coll, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return map$1(coll, (x, iterCb) => { - _iteratee(x, (err, criteria) => { - if (err) return iterCb(err); - iterCb(err, {value: x, criteria}); - }); - }, (err, results) => { - if (err) return callback(err); - callback(null, results.sort(comparator).map(v => v.value)); - }); +/***/ 21545: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - function comparator(left, right) { - var a = left.criteria, b = right.criteria; - return a < b ? -1 : a > b ? 1 : 0; - } - } - var sortBy$1 = awaitify(sortBy, 3); +"use strict"; - /** - * Sets a time limit on an asynchronous function. If the function does not call - * its callback within the specified milliseconds, it will be called with a - * timeout error. The code property for the error object will be `'ETIMEDOUT'`. - * - * @name timeout - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} asyncFn - The async function to limit in time. - * @param {number} milliseconds - The specified time limit. - * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) - * to timeout Error for more information.. - * @returns {AsyncFunction} Returns a wrapped function that can be used with any - * of the control flow functions. - * Invoke this function with the same parameters as you would `asyncFunc`. 
- * @example - * - * function myFunction(foo, callback) { - * doAsyncTask(foo, function(err, data) { - * // handle errors - * if (err) return callback(err); - * - * // do some stuff ... - * - * // return processed data - * return callback(null, data); - * }); - * } - * - * var wrapped = async.timeout(myFunction, 1000); - * - * // call `wrapped` as you would `myFunction` - * wrapped({ bar: 'bar' }, function(err, data) { - * // if `myFunction` takes < 1000 ms to execute, `err` - * // and `data` will have their expected values - * - * // else `err` will be an Error with the code 'ETIMEDOUT' - * }); - */ - function timeout(asyncFn, milliseconds, info) { - var fn = wrapAsync(asyncFn); - return initialParams((args, callback) => { - var timedOut = false; - var timer; +var utils = __nccwpck_require__(20328); - function timeoutCallback() { - var name = asyncFn.name || 'anonymous'; - var error = new Error('Callback function "' + name + '" timed out.'); - error.code = 'ETIMEDOUT'; - if (info) { - error.info = info; - } - timedOut = true; - callback(error); - } +module.exports = ( + utils.isStandardBrowserEnv() ? - args.push((...cbArgs) => { - if (!timedOut) { - callback(...cbArgs); - clearTimeout(timer); - } - }); + // Standard browser envs support document.cookie + (function standardBrowserEnv() { + return { + write: function write(name, value, expires, path, domain, secure) { + var cookie = []; + cookie.push(name + '=' + encodeURIComponent(value)); - // setup timer and call original function - timer = setTimeout(timeoutCallback, milliseconds); - fn(...args); - }); - } + if (utils.isNumber(expires)) { + cookie.push('expires=' + new Date(expires).toGMTString()); + } - function range(size) { - var result = Array(size); - while (size--) { - result[size] = size; - } - return result; - } + if (utils.isString(path)) { + cookie.push('path=' + path); + } - /** - * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a - * time. - * - * @name timesLimit - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} count - The number of times to run the function. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see [async.map]{@link module:Collections.map}. - * @returns {Promise} a promise, if no callback is provided - */ - function timesLimit(count, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return mapLimit$1(range(count), limit, _iteratee, callback); - } + if (utils.isString(domain)) { + cookie.push('domain=' + domain); + } - /** - * Calls the `iteratee` function `n` times, and accumulates results in the same - * manner you would use with [map]{@link module:Collections.map}. - * - * @name times - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.map]{@link module:Collections.map} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. 
- * @returns {Promise} a promise, if no callback is provided - * @example - * - * // Pretend this is some complicated async factory - * var createUser = function(id, callback) { - * callback(null, { - * id: 'user' + id - * }); - * }; - * - * // generate 5 users - * async.times(5, function(n, next) { - * createUser(n, function(err, user) { - * next(err, user); - * }); - * }, function(err, users) { - * // we should now have 5 users - * }); - */ - function times (n, iteratee, callback) { - return timesLimit(n, Infinity, iteratee, callback) - } + if (secure === true) { + cookie.push('secure'); + } - /** - * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. - * - * @name timesSeries - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. - * @returns {Promise} a promise, if no callback is provided - */ - function timesSeries (n, iteratee, callback) { - return timesLimit(n, 1, iteratee, callback) - } + document.cookie = cookie.join('; '); + }, - /** - * A relative of `reduce`. Takes an Object or Array, and iterates over each - * element in parallel, each step potentially mutating an `accumulator` value. - * The type of the accumulator defaults to the type of collection passed in. - * - * @name transform - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {*} [accumulator] - The initial state of the transform. If omitted, - * it will default to an empty Object or Array, depending on the type of `coll` - * @param {AsyncFunction} iteratee - A function applied to each item in the - * collection that potentially modifies the accumulator. - * Invoked with (accumulator, item, key, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the transformed accumulator. - * Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - * @example - * - * async.transform([1,2,3], function(acc, item, index, callback) { - * // pointless async: - * process.nextTick(function() { - * acc[index] = item * 2 - * callback(null) - * }); - * }, function(err, result) { - * // result is now equal to [2, 4, 6] - * }); - * - * @example - * - * async.transform({a: 1, b: 2, c: 3}, function (obj, val, key, callback) { - * setImmediate(function () { - * obj[key] = val * 2; - * callback(); - * }) - * }, function (err, result) { - * // result is equal to {a: 2, b: 4, c: 6} - * }) - */ - function transform (coll, accumulator, iteratee, callback) { - if (arguments.length <= 3 && typeof accumulator === 'function') { - callback = iteratee; - iteratee = accumulator; - accumulator = Array.isArray(coll) ? [] : {}; + read: function read(name) { + var match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)')); + return (match ? 
decodeURIComponent(match[3]) : null); + }, + + remove: function remove(name) { + this.write(name, '', Date.now() - 86400000); } - callback = once(callback || promiseCallback()); - var _iteratee = wrapAsync(iteratee); + }; + })() : - eachOf$1(coll, (v, k, cb) => { - _iteratee(accumulator, v, k, cb); - }, err => callback(err, accumulator)); - return callback[PROMISE_SYMBOL] - } + // Non standard browser env (web workers, react-native) lack needed support. + (function nonStandardBrowserEnv() { + return { + write: function write() {}, + read: function read() { return null; }, + remove: function remove() {} + }; + })() +); - /** - * It runs each task in series but stops whenever any of the functions were - * successful. If one of the tasks were successful, the `callback` will be - * passed the result of the successful task. If all tasks fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name tryEach - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to - * run, each function is passed a `callback(err, result)` it must call on - * completion with an error `err` (which can be `null`) and an optional `result` - * value. - * @param {Function} [callback] - An optional callback which is called when one - * of the tasks has succeeded, or all have failed. It receives the `err` and - * `result` arguments of the last attempt at completing the `task`. Invoked with - * (err, results). - * @returns {Promise} a promise, if no callback is passed - * @example - * async.tryEach([ - * function getDataFromFirstWebsite(callback) { - * // Try getting the data from the first website - * callback(err, data); - * }, - * function getDataFromSecondWebsite(callback) { - * // First website failed, - * // Try getting the data from the backup website - * callback(err, data); - * } - * ], - * // optional callback - * function(err, results) { - * Now do something with the data. - * }); - * - */ - function tryEach(tasks, callback) { - var error = null; - var result; - return eachSeries$1(tasks, (task, taskCb) => { - wrapAsync(task)((err, ...args) => { - if (err === false) return taskCb(err); - if (args.length < 2) { - [result] = args; - } else { - result = args; - } - error = err; - taskCb(err ? null : {}); - }); - }, () => callback(error, result)); - } +/***/ }), - var tryEach$1 = awaitify(tryEach); +/***/ 41301: +/***/ ((module) => { - /** - * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, - * unmemoized form. Handy for testing. - * - * @name unmemoize - * @static - * @memberOf module:Utils - * @method - * @see [async.memoize]{@link module:Utils.memoize} - * @category Util - * @param {AsyncFunction} fn - the memoized function - * @returns {AsyncFunction} a function that calls the original unmemoized function - */ - function unmemoize(fn) { - return (...args) => { - return (fn.unmemoized || fn)(...args); - }; - } +"use strict"; - /** - * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. - * - * @name whilst - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} test - asynchronous truth test to perform before each - * execution of `iteratee`. Invoked with (). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` passes. Invoked with (callback). 
- * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if no callback is passed - * @example - * - * var count = 0; - * async.whilst( - * function test(cb) { cb(null, count < 5); }, - * function iter(callback) { - * count++; - * setTimeout(function() { - * callback(null, count); - * }, 1000); - * }, - * function (err, n) { - * // 5 seconds have passed, n = 5 - * } - * ); - */ - function whilst(test, iteratee, callback) { - callback = onlyOnce(callback); - var _fn = wrapAsync(iteratee); - var _test = wrapAsync(test); - var results = []; - function next(err, ...rest) { - if (err) return callback(err); - results = rest; - if (err === false) return; - _test(check); - } +/** + * Determines whether the specified URL is absolute + * + * @param {string} url The URL to test + * @returns {boolean} True if the specified URL is absolute, otherwise false + */ +module.exports = function isAbsoluteURL(url) { + // A URL is considered absolute if it begins with "://" or "//" (protocol-relative URL). + // RFC 3986 defines scheme name as a sequence of characters beginning with a letter and followed + // by any combination of letters, digits, plus, period, or hyphen. + return /^([a-z][a-z\d\+\-\.]*:)?\/\//i.test(url); +}; - function check(err, truth) { - if (err) return callback(err); - if (err === false) return; - if (!truth) return callback(null, ...results); - _fn(next); + +/***/ }), + +/***/ 60650: +/***/ ((module) => { + +"use strict"; + + +/** + * Determines whether the payload is an error thrown by Axios + * + * @param {*} payload The value to test + * @returns {boolean} True if the payload is an error thrown by Axios, otherwise false + */ +module.exports = function isAxiosError(payload) { + return (typeof payload === 'object') && (payload.isAxiosError === true); +}; + + +/***/ }), + +/***/ 33608: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var utils = __nccwpck_require__(20328); + +module.exports = ( + utils.isStandardBrowserEnv() ? + + // Standard browser envs have full support of the APIs needed to test + // whether the request URL is of the same origin as current location. + (function standardBrowserEnv() { + var msie = /(msie|trident)/i.test(navigator.userAgent); + var urlParsingNode = document.createElement('a'); + var originURL; + + /** + * Parse a URL to discover it's components + * + * @param {String} url The URL to be parsed + * @returns {Object} + */ + function resolveURL(url) { + var href = url; + + if (msie) { + // IE needs attribute set twice to normalize properties + urlParsingNode.setAttribute('href', href); + href = urlParsingNode.href; } - return _test(check); - } - var whilst$1 = awaitify(whilst, 3); + urlParsingNode.setAttribute('href', href); - /** - * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. `callback` will be passed an error and any - * arguments passed to the final `iteratee`'s callback. - * - * The inverse of [whilst]{@link module:ControlFlow.whilst}. 
- * - * @name until - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {AsyncFunction} test - asynchronous truth test to perform before each - * execution of `iteratee`. Invoked with (callback). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` fails. Invoked with (callback). - * @param {Function} [callback] - A callback which is called after the test - * function has passed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if a callback is not passed - * - * @example - * const results = [] - * let finished = false - * async.until(function test(page, cb) { - * cb(null, finished) - * }, function iter(next) { - * fetchPage(url, (err, body) => { - * if (err) return next(err) - * results = results.concat(body.objects) - * finished = !!body.next - * next(err) - * }) - * }, function done (err) { - * // all pages have been fetched - * }) - */ - function until(test, iteratee, callback) { - const _test = wrapAsync(test); - return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); + // urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils + return { + href: urlParsingNode.href, + protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '', + host: urlParsingNode.host, + search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '', + hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '', + hostname: urlParsingNode.hostname, + port: urlParsingNode.port, + pathname: (urlParsingNode.pathname.charAt(0) === '/') ? + urlParsingNode.pathname : + '/' + urlParsingNode.pathname + }; + } + + originURL = resolveURL(window.location.href); + + /** + * Determine if a URL shares the same origin as the current location + * + * @param {String} requestURL The URL to test + * @returns {boolean} True if URL shares the same origin, otherwise false + */ + return function isURLSameOrigin(requestURL) { + var parsed = (utils.isString(requestURL)) ? resolveURL(requestURL) : requestURL; + return (parsed.protocol === originURL.protocol && + parsed.host === originURL.host); + }; + })() : + + // Non standard browser envs (web workers, react-native) lack needed support. + (function nonStandardBrowserEnv() { + return function isURLSameOrigin() { + return true; + }; + })() +); + + +/***/ }), + +/***/ 36240: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var utils = __nccwpck_require__(20328); + +module.exports = function normalizeHeaderName(headers, normalizedName) { + utils.forEach(headers, function processHeader(value, name) { + if (name !== normalizedName && name.toUpperCase() === normalizedName.toUpperCase()) { + headers[normalizedName] = value; + delete headers[name]; } + }); +}; - /** - * Runs the `tasks` array of functions in series, each passing their results to - * the next in the array. However, if any of the `tasks` pass an error to their - * own callback, the next function is not executed, and the main `callback` is - * immediately called with the error. - * - * @name waterfall - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} - * to run. 
- * Each function should complete with any number of `result` values. - * The `result` values will be passed as arguments, in order, to the next task. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This will be passed the results of the last task's - * callback. Invoked with (err, [results]). - * @returns undefined - * @example - * - * async.waterfall([ - * function(callback) { - * callback(null, 'one', 'two'); - * }, - * function(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * }, - * function(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - * ], function (err, result) { - * // result now equals 'done' - * }); - * - * // Or, with named functions: - * async.waterfall([ - * myFirstFunction, - * mySecondFunction, - * myLastFunction, - * ], function (err, result) { - * // result now equals 'done' - * }); - * function myFirstFunction(callback) { - * callback(null, 'one', 'two'); - * } - * function mySecondFunction(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * } - * function myLastFunction(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - */ - function waterfall (tasks, callback) { - callback = once(callback); - if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); - if (!tasks.length) return callback(); - var taskIndex = 0; - function nextTask(args) { - var task = wrapAsync(tasks[taskIndex++]); - task(...args, onlyOnce(next)); - } +/***/ }), - function next(err, ...args) { - if (err === false) return - if (err || taskIndex === tasks.length) { - return callback(err, ...args); - } - nextTask(args); - } +/***/ 86455: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - nextTask([]); +"use strict"; + + +var utils = __nccwpck_require__(20328); + +// Headers whose duplicates are ignored by node +// c.f. https://nodejs.org/api/http.html#http_message_headers +var ignoreDuplicateOf = [ + 'age', 'authorization', 'content-length', 'content-type', 'etag', + 'expires', 'from', 'host', 'if-modified-since', 'if-unmodified-since', + 'last-modified', 'location', 'max-forwards', 'proxy-authorization', + 'referer', 'retry-after', 'user-agent' +]; + +/** + * Parse headers into an object + * + * ``` + * Date: Wed, 27 Aug 2014 08:58:49 GMT + * Content-Type: application/json + * Connection: keep-alive + * Transfer-Encoding: chunked + * ``` + * + * @param {String} headers Headers needing to be parsed + * @returns {Object} Headers parsed into an object + */ +module.exports = function parseHeaders(headers) { + var parsed = {}; + var key; + var val; + var i; + + if (!headers) { return parsed; } + + utils.forEach(headers.split('\n'), function parser(line) { + i = line.indexOf(':'); + key = utils.trim(line.substr(0, i)).toLowerCase(); + val = utils.trim(line.substr(i + 1)); + + if (key) { + if (parsed[key] && ignoreDuplicateOf.indexOf(key) >= 0) { + return; + } + if (key === 'set-cookie') { + parsed[key] = (parsed[key] ? parsed[key] : []).concat([val]); + } else { + parsed[key] = parsed[key] ? parsed[key] + ', ' + val : val; + } } + }); - var waterfall$1 = awaitify(waterfall); - - /** - * An "async function" in the context of Async is an asynchronous function with - * a variable number of parameters, with the final parameter being a callback. 
- * (`function (arg1, arg2, ..., callback) {}`) - * The final callback is of the form `callback(err, results...)`, which must be - * called once the function is completed. The callback should be called with a - * Error as its first argument to signal that an error occurred. - * Otherwise, if no error occurred, it should be called with `null` as the first - * argument, and any additional `result` arguments that may apply, to signal - * successful completion. - * The callback must be called exactly once, ideally on a later tick of the - * JavaScript event loop. - * - * This type of function is also referred to as a "Node-style async function", - * or a "continuation passing-style function" (CPS). Most of the methods of this - * library are themselves CPS/Node-style async functions, or functions that - * return CPS/Node-style async functions. - * - * Wherever we accept a Node-style async function, we also directly accept an - * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. - * In this case, the `async` function will not be passed a final callback - * argument, and any thrown error will be used as the `err` argument of the - * implicit callback, and the return value will be used as the `result` value. - * (i.e. a `rejected` of the returned Promise becomes the `err` callback - * argument, and a `resolved` value becomes the `result`.) - * - * Note, due to JavaScript limitations, we can only detect native `async` - * functions and not transpilied implementations. - * Your environment must have `async`/`await` support for this to work. - * (e.g. Node > v7.6, or a recent version of a modern browser). - * If you are using `async` functions through a transpiler (e.g. Babel), you - * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, - * because the `async function` will be compiled to an ordinary function that - * returns a promise. 
- * - * @typedef {Function} AsyncFunction - * @static - */ - - var index = { - apply, - applyEach: applyEach$1, - applyEachSeries, - asyncify, - auto, - autoInject, - cargo, - cargoQueue: cargo$1, - compose, - concat: concat$1, - concatLimit: concatLimit$1, - concatSeries: concatSeries$1, - constant, - detect: detect$1, - detectLimit: detectLimit$1, - detectSeries: detectSeries$1, - dir, - doUntil, - doWhilst: doWhilst$1, - each, - eachLimit: eachLimit$2, - eachOf: eachOf$1, - eachOfLimit: eachOfLimit$2, - eachOfSeries: eachOfSeries$1, - eachSeries: eachSeries$1, - ensureAsync, - every: every$1, - everyLimit: everyLimit$1, - everySeries: everySeries$1, - filter: filter$1, - filterLimit: filterLimit$1, - filterSeries: filterSeries$1, - forever: forever$1, - groupBy, - groupByLimit: groupByLimit$1, - groupBySeries, - log, - map: map$1, - mapLimit: mapLimit$1, - mapSeries: mapSeries$1, - mapValues, - mapValuesLimit: mapValuesLimit$1, - mapValuesSeries, - memoize, - nextTick, - parallel: parallel$1, - parallelLimit, - priorityQueue, - queue: queue$1, - race: race$1, - reduce: reduce$1, - reduceRight, - reflect, - reflectAll, - reject: reject$2, - rejectLimit: rejectLimit$1, - rejectSeries: rejectSeries$1, - retry, - retryable, - seq, - series, - setImmediate: setImmediate$1, - some: some$1, - someLimit: someLimit$1, - someSeries: someSeries$1, - sortBy: sortBy$1, - timeout, - times, - timesLimit, - timesSeries, - transform, - tryEach: tryEach$1, - unmemoize, - until, - waterfall: waterfall$1, - whilst: whilst$1, - - // aliases - all: every$1, - allLimit: everyLimit$1, - allSeries: everySeries$1, - any: some$1, - anyLimit: someLimit$1, - anySeries: someSeries$1, - find: detect$1, - findLimit: detectLimit$1, - findSeries: detectSeries$1, - flatMap: concat$1, - flatMapLimit: concatLimit$1, - flatMapSeries: concatSeries$1, - forEach: each, - forEachSeries: eachSeries$1, - forEachLimit: eachLimit$2, - forEachOf: eachOf$1, - forEachOfSeries: eachOfSeries$1, - forEachOfLimit: eachOfLimit$2, - inject: reduce$1, - foldl: reduce$1, - foldr: reduceRight, - select: filter$1, - selectLimit: filterLimit$1, - selectSeries: filterSeries$1, - wrapSync: asyncify, - during: whilst$1, - doDuring: doWhilst$1 - }; - - exports.default = index; - exports.apply = apply; - exports.applyEach = applyEach$1; - exports.applyEachSeries = applyEachSeries; - exports.asyncify = asyncify; - exports.auto = auto; - exports.autoInject = autoInject; - exports.cargo = cargo; - exports.cargoQueue = cargo$1; - exports.compose = compose; - exports.concat = concat$1; - exports.concatLimit = concatLimit$1; - exports.concatSeries = concatSeries$1; - exports.constant = constant; - exports.detect = detect$1; - exports.detectLimit = detectLimit$1; - exports.detectSeries = detectSeries$1; - exports.dir = dir; - exports.doUntil = doUntil; - exports.doWhilst = doWhilst$1; - exports.each = each; - exports.eachLimit = eachLimit$2; - exports.eachOf = eachOf$1; - exports.eachOfLimit = eachOfLimit$2; - exports.eachOfSeries = eachOfSeries$1; - exports.eachSeries = eachSeries$1; - exports.ensureAsync = ensureAsync; - exports.every = every$1; - exports.everyLimit = everyLimit$1; - exports.everySeries = everySeries$1; - exports.filter = filter$1; - exports.filterLimit = filterLimit$1; - exports.filterSeries = filterSeries$1; - exports.forever = forever$1; - exports.groupBy = groupBy; - exports.groupByLimit = groupByLimit$1; - exports.groupBySeries = groupBySeries; - exports.log = log; - exports.map = map$1; - exports.mapLimit = mapLimit$1; - 
exports.mapSeries = mapSeries$1; - exports.mapValues = mapValues; - exports.mapValuesLimit = mapValuesLimit$1; - exports.mapValuesSeries = mapValuesSeries; - exports.memoize = memoize; - exports.nextTick = nextTick; - exports.parallel = parallel$1; - exports.parallelLimit = parallelLimit; - exports.priorityQueue = priorityQueue; - exports.queue = queue$1; - exports.race = race$1; - exports.reduce = reduce$1; - exports.reduceRight = reduceRight; - exports.reflect = reflect; - exports.reflectAll = reflectAll; - exports.reject = reject$2; - exports.rejectLimit = rejectLimit$1; - exports.rejectSeries = rejectSeries$1; - exports.retry = retry; - exports.retryable = retryable; - exports.seq = seq; - exports.series = series; - exports.setImmediate = setImmediate$1; - exports.some = some$1; - exports.someLimit = someLimit$1; - exports.someSeries = someSeries$1; - exports.sortBy = sortBy$1; - exports.timeout = timeout; - exports.times = times; - exports.timesLimit = timesLimit; - exports.timesSeries = timesSeries; - exports.transform = transform; - exports.tryEach = tryEach$1; - exports.unmemoize = unmemoize; - exports.until = until; - exports.waterfall = waterfall$1; - exports.whilst = whilst$1; - exports.all = every$1; - exports.allLimit = everyLimit$1; - exports.allSeries = everySeries$1; - exports.any = some$1; - exports.anyLimit = someLimit$1; - exports.anySeries = someSeries$1; - exports.find = detect$1; - exports.findLimit = detectLimit$1; - exports.findSeries = detectSeries$1; - exports.flatMap = concat$1; - exports.flatMapLimit = concatLimit$1; - exports.flatMapSeries = concatSeries$1; - exports.forEach = each; - exports.forEachSeries = eachSeries$1; - exports.forEachLimit = eachLimit$2; - exports.forEachOf = eachOf$1; - exports.forEachOfSeries = eachOfSeries$1; - exports.forEachOfLimit = eachOfLimit$2; - exports.inject = reduce$1; - exports.foldl = reduce$1; - exports.foldr = reduceRight; - exports.select = filter$1; - exports.selectLimit = filterLimit$1; - exports.selectSeries = filterSeries$1; - exports.wrapSync = asyncify; - exports.during = whilst$1; - exports.doDuring = doWhilst$1; - - Object.defineProperty(exports, '__esModule', { value: true }); - -}))); - - -/***/ }), - -/***/ 14812: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = -{ - parallel : __nccwpck_require__(8210), - serial : __nccwpck_require__(50445), - serialOrdered : __nccwpck_require__(3578) -}; + return parsed; +}; /***/ }), -/***/ 1700: +/***/ 74850: /***/ ((module) => { -// API -module.exports = abort; - -/** - * Aborts leftover active jobs - * - * @param {object} state - current state object - */ -function abort(state) -{ - Object.keys(state.jobs).forEach(clean.bind(state)); +"use strict"; - // reset leftover jobs - state.jobs = {}; -} /** - * Cleans up leftover job by invoking abort function for the provided job id + * Syntactic sugar for invoking a function and expanding an array for arguments. * - * @this state - * @param {string|number} key - job id to abort + * Common use case would be to use `Function.prototype.apply`. + * + * ```js + * function f(x, y, z) {} + * var args = [1, 2, 3]; + * f.apply(null, args); + * ``` + * + * With `spread` this example can be re-written. 
+ * + * ```js + * spread(function(x, y, z) {})([1, 2, 3]); + * ``` + * + * @param {Function} callback + * @returns {Function} */ -function clean(key) -{ - if (typeof this.jobs[key] == 'function') - { - this.jobs[key](); - } -} +module.exports = function spread(callback) { + return function wrap(arr) { + return callback.apply(null, arr); + }; +}; /***/ }), -/***/ 72794: +/***/ 20328: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var defer = __nccwpck_require__(15295); - -// API -module.exports = async; - -/** - * Runs provided callback asynchronously - * even if callback itself is not - * - * @param {function} callback - callback to invoke - * @returns {function} - augmented callback - */ -function async(callback) -{ - var isAsync = false; - - // check if async happened - defer(function() { isAsync = true; }); +"use strict"; - return function async_callback(err, result) - { - if (isAsync) - { - callback(err, result); - } - else - { - defer(function nextTick_callback() - { - callback(err, result); - }); - } - }; -} +var bind = __nccwpck_require__(77065); -/***/ }), +/*global toString:true*/ -/***/ 15295: -/***/ ((module) => { +// utils is a library of generic helper functions non-specific to axios -module.exports = defer; +var toString = Object.prototype.toString; /** - * Runs provided function on next iteration of the event loop + * Determine if a value is an Array * - * @param {function} fn - function to run + * @param {Object} val The value to test + * @returns {boolean} True if value is an Array, otherwise false */ -function defer(fn) -{ - var nextTick = typeof setImmediate == 'function' - ? setImmediate - : ( - typeof process == 'object' && typeof process.nextTick == 'function' - ? process.nextTick - : null - ); - - if (nextTick) - { - nextTick(fn); - } - else - { - setTimeout(fn, 0); - } +function isArray(val) { + return toString.call(val) === '[object Array]'; } - -/***/ }), - -/***/ 9023: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var async = __nccwpck_require__(72794) - , abort = __nccwpck_require__(1700) - ; - -// API -module.exports = iterate; - /** - * Iterates over each job object + * Determine if a value is undefined * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {object} state - current job status - * @param {function} callback - invoked when all elements processed + * @param {Object} val The value to test + * @returns {boolean} True if the value is undefined, otherwise false */ -function iterate(list, iterator, state, callback) -{ - // store current index - var key = state['keyedList'] ? 
state['keyedList'][state.index] : state.index; - - state.jobs[key] = runJob(iterator, key, list[key], function(error, output) - { - // don't repeat yourself - // skip secondary callbacks - if (!(key in state.jobs)) - { - return; - } - - // clean up jobs - delete state.jobs[key]; - - if (error) - { - // don't process rest of the results - // stop still active jobs - // and reset the list - abort(state); - } - else - { - state.results[key] = output; - } - - // return salvaged results - callback(error, state.results); - }); +function isUndefined(val) { + return typeof val === 'undefined'; } /** - * Runs iterator over provided job element + * Determine if a value is a Buffer * - * @param {function} iterator - iterator to invoke - * @param {string|number} key - key/index of the element in the list of jobs - * @param {mixed} item - job description - * @param {function} callback - invoked after iterator is done with the job - * @returns {function|mixed} - job abort function or something else + * @param {Object} val The value to test + * @returns {boolean} True if value is a Buffer, otherwise false */ -function runJob(iterator, key, item, callback) -{ - var aborter; - - // allow shortcut if iterator expects only two arguments - if (iterator.length == 2) - { - aborter = iterator(item, async(callback)); - } - // otherwise go with full three arguments - else - { - aborter = iterator(item, key, async(callback)); - } - - return aborter; +function isBuffer(val) { + return val !== null && !isUndefined(val) && val.constructor !== null && !isUndefined(val.constructor) + && typeof val.constructor.isBuffer === 'function' && val.constructor.isBuffer(val); } - -/***/ }), - -/***/ 42474: -/***/ ((module) => { - -// API -module.exports = state; - /** - * Creates initial state object - * for iteration over list + * Determine if a value is an ArrayBuffer * - * @param {array|object} list - list to iterate over - * @param {function|null} sortMethod - function to use for keys sort, - * or `null` to keep them as is - * @returns {object} - initial state object + * @param {Object} val The value to test + * @returns {boolean} True if value is an ArrayBuffer, otherwise false */ -function state(list, sortMethod) -{ - var isNamedList = !Array.isArray(list) - , initState = - { - index : 0, - keyedList: isNamedList || sortMethod ? Object.keys(list) : null, - jobs : {}, - results : isNamedList ? {} : [], - size : isNamedList ? Object.keys(list).length : list.length - } - ; - - if (sortMethod) - { - // sort array keys based on it's values - // sort object's keys just on own merit - initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b) - { - return sortMethod(list[a], list[b]); - }); - } - - return initState; +function isArrayBuffer(val) { + return toString.call(val) === '[object ArrayBuffer]'; } - -/***/ }), - -/***/ 37942: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var abort = __nccwpck_require__(1700) - , async = __nccwpck_require__(72794) - ; - -// API -module.exports = terminator; - /** - * Terminates jobs in the attached state context + * Determine if a value is a FormData * - * @this AsyncKitState# - * @param {function} callback - final callback to invoke after termination + * @param {Object} val The value to test + * @returns {boolean} True if value is an FormData, otherwise false */ -function terminator(callback) -{ - if (!Object.keys(this.jobs).length) - { - return; - } - - // fast forward iteration index - this.index = this.size; - - // abort jobs - abort(this); - - // send back results we have so far - async(callback)(null, this.results); +function isFormData(val) { + return (typeof FormData !== 'undefined') && (val instanceof FormData); } - -/***/ }), - -/***/ 8210: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var iterate = __nccwpck_require__(9023) - , initState = __nccwpck_require__(42474) - , terminator = __nccwpck_require__(37942) - ; - -// Public API -module.exports = parallel; - /** - * Runs iterator over provided array elements in parallel + * Determine if a value is a view on an ArrayBuffer * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator + * @param {Object} val The value to test + * @returns {boolean} True if value is a view on an ArrayBuffer, otherwise false */ -function parallel(list, iterator, callback) -{ - var state = initState(list); - - while (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, function(error, result) - { - if (error) - { - callback(error, result); - return; - } - - // looks like it's the last one - if (Object.keys(state.jobs).length === 0) - { - callback(null, state.results); - return; - } - }); - - state.index++; +function isArrayBufferView(val) { + var result; + if ((typeof ArrayBuffer !== 'undefined') && (ArrayBuffer.isView)) { + result = ArrayBuffer.isView(val); + } else { + result = (val) && (val.buffer) && (val.buffer instanceof ArrayBuffer); } - - return terminator.bind(state, callback); + return result; } - -/***/ }), - -/***/ 50445: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var serialOrdered = __nccwpck_require__(3578); - -// Public API -module.exports = serial; - /** - * Runs iterator over provided array elements in series + * Determine if a value is a String * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator + * @param {Object} val The value to test + * @returns {boolean} True if value is a String, otherwise false */ -function serial(list, iterator, callback) -{ - return serialOrdered(list, iterator, null, callback); +function isString(val) { + return typeof val === 'string'; } - -/***/ }), - -/***/ 3578: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var iterate = __nccwpck_require__(9023) - , initState = 
__nccwpck_require__(42474) - , terminator = __nccwpck_require__(37942) - ; - -// Public API -module.exports = serialOrdered; -// sorting helpers -module.exports.ascending = ascending; -module.exports.descending = descending; - /** - * Runs iterator over provided sorted array elements in series + * Determine if a value is a Number * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator + * @param {Object} val The value to test + * @returns {boolean} True if value is a Number, otherwise false */ -function serialOrdered(list, iterator, sortMethod, callback) -{ - var state = initState(list, sortMethod); - - iterate(list, iterator, state, function iteratorHandler(error, result) - { - if (error) - { - callback(error, result); - return; - } - - state.index++; - - // are we there yet? - if (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, iteratorHandler); - return; - } - - // done here - callback(null, state.results); - }); - - return terminator.bind(state, callback); +function isNumber(val) { + return typeof val === 'number'; } -/* - * -- Sort methods - */ - /** - * sort helper to sort array elements in ascending order + * Determine if a value is an Object * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result + * @param {Object} val The value to test + * @returns {boolean} True if value is an Object, otherwise false */ -function ascending(a, b) -{ - return a < b ? -1 : a > b ? 1 : 0; +function isObject(val) { + return val !== null && typeof val === 'object'; } /** - * sort helper to sort array elements in descending order + * Determine if a value is a plain Object * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result + * @param {Object} val The value to test + * @return {boolean} True if value is a plain Object, otherwise false */ -function descending(a, b) -{ - return -1 * ascending(a, b); -} - - -/***/ }), - -/***/ 96342: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function isPlainObject(val) { + if (toString.call(val) !== '[object Object]') { + return false; + } + var prototype = Object.getPrototypeOf(val); + return prototype === null || prototype === Object.prototype; +} -/*! - * Copyright 2010 LearnBoost - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 +/** + * Determine if a value is a Date * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * @param {Object} val The value to test + * @returns {boolean} True if value is a Date, otherwise false */ +function isDate(val) { + return toString.call(val) === '[object Date]'; +} /** - * Module dependencies. 
+ * Determine if a value is a File + * + * @param {Object} val The value to test + * @returns {boolean} True if value is a File, otherwise false */ - -var crypto = __nccwpck_require__(76417) - , parse = __nccwpck_require__(78835).parse - ; +function isFile(val) { + return toString.call(val) === '[object File]'; +} /** - * Valid keys. + * Determine if a value is a Blob + * + * @param {Object} val The value to test + * @returns {boolean} True if value is a Blob, otherwise false */ - -var keys = - [ 'acl' - , 'location' - , 'logging' - , 'notification' - , 'partNumber' - , 'policy' - , 'requestPayment' - , 'torrent' - , 'uploadId' - , 'uploads' - , 'versionId' - , 'versioning' - , 'versions' - , 'website' - ] +function isBlob(val) { + return toString.call(val) === '[object Blob]'; +} /** - * Return an "Authorization" header value with the given `options` - * in the form of "AWS :" + * Determine if a value is a Function * - * @param {Object} options - * @return {String} - * @api private + * @param {Object} val The value to test + * @returns {boolean} True if value is a Function, otherwise false */ - -function authorization (options) { - return 'AWS ' + options.key + ':' + sign(options) +function isFunction(val) { + return toString.call(val) === '[object Function]'; } -module.exports = authorization -module.exports.authorization = authorization - /** - * Simple HMAC-SHA1 Wrapper + * Determine if a value is a Stream * - * @param {Object} options - * @return {String} - * @api private - */ - -function hmacSha1 (options) { - return crypto.createHmac('sha1', options.secret).update(options.message).digest('base64') + * @param {Object} val The value to test + * @returns {boolean} True if value is a Stream, otherwise false + */ +function isStream(val) { + return isObject(val) && isFunction(val.pipe); } -module.exports.hmacSha1 = hmacSha1 - /** - * Create a base64 sha1 HMAC for `options`. - * - * @param {Object} options - * @return {String} - * @api private + * Determine if a value is a URLSearchParams object + * + * @param {Object} val The value to test + * @returns {boolean} True if value is a URLSearchParams object, otherwise false */ - -function sign (options) { - options.message = stringToSign(options) - return hmacSha1(options) +function isURLSearchParams(val) { + return typeof URLSearchParams !== 'undefined' && val instanceof URLSearchParams; } -module.exports.sign = sign /** - * Create a base64 sha1 HMAC for `options`. + * Trim excess whitespace off the beginning and end of a string * - * Specifically to be used with S3 presigned URLs - * - * @param {Object} options - * @return {String} - * @api private + * @param {String} str The String to trim + * @returns {String} The String freed of excess whitespace */ - -function signQuery (options) { - options.message = queryStringToSign(options) - return hmacSha1(options) +function trim(str) { + return str.replace(/^\s*/, '').replace(/\s*$/, ''); } -module.exports.signQuery= signQuery /** - * Return a string for sign() with the given `options`. + * Determine if we're running in a standard browser environment * - * Spec: - * - * \n - * \n - * \n - * \n - * [headers\n] - * + * This allows axios to run in a web worker, and react-native. + * Both environments support XMLHttpRequest, but not fully standard globals. 
* - * @param {Object} options - * @return {String} - * @api private + * web workers: + * typeof window -> undefined + * typeof document -> undefined + * + * react-native: + * navigator.product -> 'ReactNative' + * nativescript + * navigator.product -> 'NativeScript' or 'NS' */ - -function stringToSign (options) { - var headers = options.amazonHeaders || '' - if (headers) headers += '\n' - var r = - [ options.verb - , options.md5 - , options.contentType - , options.date ? options.date.toUTCString() : '' - , headers + options.resource - ] - return r.join('\n') +function isStandardBrowserEnv() { + if (typeof navigator !== 'undefined' && (navigator.product === 'ReactNative' || + navigator.product === 'NativeScript' || + navigator.product === 'NS')) { + return false; + } + return ( + typeof window !== 'undefined' && + typeof document !== 'undefined' + ); } -module.exports.stringToSign = stringToSign /** - * Return a string for sign() with the given `options`, but is meant exclusively - * for S3 presigned URLs + * Iterate over an Array or an Object invoking a function for each item. * - * Spec: - * - * \n - * + * If `obj` is an Array callback will be called passing + * the value, index, and complete array for each item. * - * @param {Object} options - * @return {String} - * @api private + * If 'obj' is an Object callback will be called passing + * the value, key, and complete object for each property. + * + * @param {Object|Array} obj The object to iterate + * @param {Function} fn The callback to invoke for each item */ +function forEach(obj, fn) { + // Don't bother if no value provided + if (obj === null || typeof obj === 'undefined') { + return; + } -function queryStringToSign (options){ - return 'GET\n\n\n' + options.date + '\n' + options.resource + // Force an array if not already something iterable + if (typeof obj !== 'object') { + /*eslint no-param-reassign:0*/ + obj = [obj]; + } + + if (isArray(obj)) { + // Iterate over array values + for (var i = 0, l = obj.length; i < l; i++) { + fn.call(null, obj[i], i, obj); + } + } else { + // Iterate over object keys + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + fn.call(null, obj[key], key, obj); + } + } + } } -module.exports.queryStringToSign = queryStringToSign /** - * Perform the following: + * Accepts varargs expecting each argument to be an object, then + * immutably merges the properties of each object and returns result. * - * - ignore non-amazon headers - * - lowercase fields - * - sort lexicographically - * - trim whitespace between ":" - * - join with newline + * When multiple objects contain the same key the later object in + * the arguments list will take precedence. * - * @param {Object} headers - * @return {String} - * @api private + * Example: + * + * ```js + * var result = merge({foo: 123}, {foo: 456}); + * console.log(result.foo); // outputs 456 + * ``` + * + * @param {Object} obj1 Object to merge + * @returns {Object} Result of all merge properties */ +function merge(/* obj1, obj2, obj3, ... 
*/) { + var result = {}; + function assignValue(val, key) { + if (isPlainObject(result[key]) && isPlainObject(val)) { + result[key] = merge(result[key], val); + } else if (isPlainObject(val)) { + result[key] = merge({}, val); + } else if (isArray(val)) { + result[key] = val.slice(); + } else { + result[key] = val; + } + } -function canonicalizeHeaders (headers) { - var buf = [] - , fields = Object.keys(headers) - ; - for (var i = 0, len = fields.length; i < len; ++i) { - var field = fields[i] - , val = headers[field] - , field = field.toLowerCase() - ; - if (0 !== field.indexOf('x-amz')) continue - buf.push(field + ':' + val) + for (var i = 0, l = arguments.length; i < l; i++) { + forEach(arguments[i], assignValue); } - return buf.sort().join('\n') + return result; } -module.exports.canonicalizeHeaders = canonicalizeHeaders /** - * Perform the following: - * - * - ignore non sub-resources - * - sort lexicographically + * Extends object a by mutably adding to it the properties of object b. * - * @param {String} resource - * @return {String} - * @api private + * @param {Object} a The object to be extended + * @param {Object} b The object to copy properties from + * @param {Object} thisArg The object to bind function to + * @return {Object} The resulting value of object a */ +function extend(a, b, thisArg) { + forEach(b, function assignValue(val, key) { + if (thisArg && typeof val === 'function') { + a[key] = bind(val, thisArg); + } else { + a[key] = val; + } + }); + return a; +} -function canonicalizeResource (resource) { - var url = parse(resource, true) - , path = url.pathname - , buf = [] - ; - - Object.keys(url.query).forEach(function(key){ - if (!~keys.indexOf(key)) return - var val = '' == url.query[key] ? '' : '=' + encodeURIComponent(url.query[key]) - buf.push(key + val) - }) - - return path + (buf.length ? '?' + buf.sort().join('&') : '') +/** + * Remove byte order marker. 
This catches EF BB BF (the UTF-8 BOM) + * + * @param {string} content with BOM + * @return {string} content value without BOM + */ +function stripBOM(content) { + if (content.charCodeAt(0) === 0xFEFF) { + content = content.slice(1); + } + return content; } -module.exports.canonicalizeResource = canonicalizeResource + +module.exports = { + isArray: isArray, + isArrayBuffer: isArrayBuffer, + isBuffer: isBuffer, + isFormData: isFormData, + isArrayBufferView: isArrayBufferView, + isString: isString, + isNumber: isNumber, + isObject: isObject, + isPlainObject: isPlainObject, + isUndefined: isUndefined, + isDate: isDate, + isFile: isFile, + isBlob: isBlob, + isFunction: isFunction, + isStream: isStream, + isURLSearchParams: isURLSearchParams, + isStandardBrowserEnv: isStandardBrowserEnv, + forEach: forEach, + merge: merge, + extend: extend, + trim: trim, + stripBOM: stripBOM +}; /***/ }), -/***/ 16071: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9417: +/***/ ((module) => { -var aws4 = exports, - url = __nccwpck_require__(78835), - querystring = __nccwpck_require__(71191), - crypto = __nccwpck_require__(76417), - lru = __nccwpck_require__(74225), - credentialsCache = lru(1000) +"use strict"; -// http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); -function hmac(key, string, encoding) { - return crypto.createHmac('sha256', key).update(string, 'utf8').digest(encoding) -} + var r = range(a, b, str); -function hash(string, encoding) { - return crypto.createHash('sha256').update(string, 'utf8').digest(encoding) -} - -// This function assumes the string has already been percent encoded -function encodeRfc3986(urlEncodedString) { - return urlEncodedString.replace(/[!'()*]/g, function(c) { - return '%' + c.charCodeAt(0).toString(16).toUpperCase() - }) -} - -function encodeRfc3986Full(str) { - return encodeRfc3986(encodeURIComponent(str)) + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; } -// A bit of a combination of: -// https://github.com/aws/aws-sdk-java-v2/blob/dc695de6ab49ad03934e1b02e7263abbd2354be0/core/auth/src/main/java/software/amazon/awssdk/auth/signer/internal/AbstractAws4Signer.java#L59 -// https://github.com/aws/aws-sdk-js/blob/18cb7e5b463b46239f9fdd4a65e2ff8c81831e8f/lib/signers/v4.js#L191-L199 -// https://github.com/mhart/aws4fetch/blob/b3aed16b6f17384cf36ea33bcba3c1e9f3bdfefd/src/main.js#L25-L34 -var HEADERS_TO_IGNORE = { - 'authorization': true, - 'connection': true, - 'x-amzn-trace-id': true, - 'user-agent': true, - 'expect': true, - 'presigned-expires': true, - 'range': true, +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? 
m[0] : null; } -// request: { path | body, [host], [method], [headers], [service], [region] } -// credentials: { accessKeyId, secretAccessKey, [sessionToken] } -function RequestSigner(request, credentials) { - - if (typeof request === 'string') request = url.parse(request) - - var headers = request.headers = (request.headers || {}), - hostParts = (!this.service || !this.region) && this.matchHost(request.hostname || request.host || headers.Host || headers.host) - - this.request = request - this.credentials = credentials || this.defaultCredentials() - - this.service = request.service || hostParts[0] || '' - this.region = request.region || hostParts[1] || 'us-east-1' - - // SES uses a different domain from the service name - if (this.service === 'email') this.service = 'ses' - - if (!request.method && request.body) - request.method = 'POST' - - if (!headers.Host && !headers.host) { - headers.Host = request.hostname || request.host || this.createHost() +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; - // If a port is specified explicitly, use it as is - if (request.port) - headers.Host += ':' + request.port - } - if (!request.hostname && !request.host) - request.hostname = headers.Host || headers.host + if (ai >= 0 && bi > 0) { + begs = []; + left = str.length; - this.isCodeCommitGit = this.service === 'codecommit' && request.method === 'GIT' -} + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } -RequestSigner.prototype.matchHost = function(host) { - var match = (host || '').match(/([^\.]+)\.(?:([^\.]*)\.)?amazonaws\.com(\.cn)?$/) - var hostParts = (match || []).slice(1, 3) + bi = str.indexOf(b, i + 1); + } - // ES's hostParts are sometimes the other way round, if the value that is expected - // to be region equals ‘es’ switch them back - // e.g. search-cluster-name-aaaa00aaaa0aaa0aaaaaaa0aaa.us-east-1.es.amazonaws.com - if (hostParts[1] === 'es') - hostParts = hostParts.reverse() + i = ai < bi && ai >= 0 ? ai : bi; + } - if (hostParts[1] == 's3') { - hostParts[0] = 's3' - hostParts[1] = 'us-east-1' - } else { - for (var i = 0; i < 2; i++) { - if (/^s3-/.test(hostParts[i])) { - hostParts[1] = hostParts[i].slice(3) - hostParts[0] = 's3' - break - } + if (begs.length) { + result = [ left, right ]; } } - return hostParts -} - -// http://docs.aws.amazon.com/general/latest/gr/rande.html -RequestSigner.prototype.isSingleRegion = function() { - // Special case for S3 and SimpleDB in us-east-1 - if (['s3', 'sdb'].indexOf(this.service) >= 0 && this.region === 'us-east-1') return true - - return ['cloudfront', 'ls', 'route53', 'iam', 'importexport', 'sts'] - .indexOf(this.service) >= 0 -} - -RequestSigner.prototype.createHost = function() { - var region = this.isSingleRegion() ? '' : '.' + this.region, - subdomain = this.service === 'ses' ? 
'email' : this.service - return subdomain + region + '.amazonaws.com' + return result; } -RequestSigner.prototype.prepareRequest = function() { - this.parsePath() - var request = this.request, headers = request.headers, query +/***/ }), - if (request.signQuery) { +/***/ 45447: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.parsedPath.query = query = this.parsedPath.query || {} +"use strict"; - if (this.credentials.sessionToken) - query['X-Amz-Security-Token'] = this.credentials.sessionToken - if (this.service === 's3' && !query['X-Amz-Expires']) - query['X-Amz-Expires'] = 86400 +var crypto_hash_sha512 = __nccwpck_require__(68729).lowlevel.crypto_hash; - if (query['X-Amz-Date']) - this.datetime = query['X-Amz-Date'] - else - query['X-Amz-Date'] = this.getDateTime() +/* + * This file is a 1:1 port from the OpenBSD blowfish.c and bcrypt_pbkdf.c. As a + * result, it retains the original copyright and license. The two files are + * under slightly different (but compatible) licenses, and are here combined in + * one file. + * + * Credit for the actual porting work goes to: + * Devi Mandiri + */ - query['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256' - query['X-Amz-Credential'] = this.credentials.accessKeyId + '/' + this.credentialString() - query['X-Amz-SignedHeaders'] = this.signedHeaders() +/* + * The Blowfish portions are under the following license: + * + * Blowfish block cipher for OpenBSD + * Copyright 1997 Niels Provos + * All rights reserved. + * + * Implementation advice by David Mazieres . + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ - } else { +/* + * The bcrypt_pbkdf portions are under the following license: + * + * Copyright (c) 2013 Ted Unangst + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ - if (!request.doNotModifyHeaders && !this.isCodeCommitGit) { - if (request.body && !headers['Content-Type'] && !headers['content-type']) - headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8' +/* + * Performance improvements (Javascript-specific): + * + * Copyright 2016, Joyent Inc + * Author: Alex Wilson + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ - if (request.body && !headers['Content-Length'] && !headers['content-length']) - headers['Content-Length'] = Buffer.byteLength(request.body) +// Ported from OpenBSD bcrypt_pbkdf.c v1.9 - if (this.credentials.sessionToken && !headers['X-Amz-Security-Token'] && !headers['x-amz-security-token']) - headers['X-Amz-Security-Token'] = this.credentials.sessionToken +var BLF_J = 0; - if (this.service === 's3' && !headers['X-Amz-Content-Sha256'] && !headers['x-amz-content-sha256']) - headers['X-Amz-Content-Sha256'] = hash(this.request.body || '', 'hex') +var Blowfish = function() { + this.S = [ + new Uint32Array([ + 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, + 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, + 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, + 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, + 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, + 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, + 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, + 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, + 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, + 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, + 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, + 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, + 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, + 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, + 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, + 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, + 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, + 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, + 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, + 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, + 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, + 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, + 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, + 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, + 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, + 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, + 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, + 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, + 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, + 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, + 0x68fb6faf, 
0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, + 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, + 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, + 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, + 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, + 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, + 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, + 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, + 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, + 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, + 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, + 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, + 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, + 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, + 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, + 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, + 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, + 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, + 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, + 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, + 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, + 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, + 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, + 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, + 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, + 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, + 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, + 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, + 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, + 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, + 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, + 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, + 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, + 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a]), + new Uint32Array([ + 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, + 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, + 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, + 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, + 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, + 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, + 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, + 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, + 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, + 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, + 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, + 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, + 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, + 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, + 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, + 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, + 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, + 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, + 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, + 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, + 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, + 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, + 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, + 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, + 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, + 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, + 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, + 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, + 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, + 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, + 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, + 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, + 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, + 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, + 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, + 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, + 0x1521b628, 0x29076170, 0xecdd4775, 
0x619f1510, + 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, + 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, + 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, + 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, + 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, + 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, + 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, + 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, + 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, + 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, + 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, + 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, + 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, + 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, + 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, + 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, + 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, + 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, + 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, + 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, + 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, + 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, + 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, + 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, + 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, + 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, + 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7]), + new Uint32Array([ + 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, + 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, + 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, + 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, + 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, + 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, + 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, + 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, + 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, + 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, + 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, + 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, + 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, + 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, + 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, + 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, + 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, + 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, + 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, + 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, + 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, + 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, + 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, + 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, + 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, + 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, + 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, + 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, + 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, + 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, + 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, + 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, + 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, + 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, + 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, + 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, + 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, + 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, + 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, + 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, + 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, + 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, + 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, + 0x77a057be, 
0xbde8ae24, 0x55464299, 0xbf582e61, + 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, + 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, + 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, + 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, + 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, + 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, + 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, + 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, + 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, + 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, + 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, + 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, + 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, + 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, + 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, + 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, + 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, + 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, + 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, + 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0]), + new Uint32Array([ + 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, + 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, + 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, + 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, + 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, + 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, + 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, + 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, + 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, + 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, + 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, + 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, + 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, + 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, + 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, + 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, + 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, + 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, + 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, + 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, + 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, + 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, + 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, + 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, + 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, + 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, + 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, + 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, + 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, + 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, + 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, + 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, + 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, + 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, + 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, + 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, + 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, + 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, + 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, + 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, + 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, + 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, + 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, + 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, + 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, + 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, + 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, + 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, + 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, + 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 
0x0fe3f11d, + 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, + 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, + 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, + 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, + 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, + 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, + 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, + 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, + 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, + 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, + 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, + 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, + 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, + 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6]) + ]; + this.P = new Uint32Array([ + 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, + 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, + 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, + 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, + 0x9216d5d9, 0x8979fb1b]); +}; - if (headers['X-Amz-Date'] || headers['x-amz-date']) - this.datetime = headers['X-Amz-Date'] || headers['x-amz-date'] - else - headers['X-Amz-Date'] = this.getDateTime() - } +function F(S, x8, i) { + return (((S[0][x8[i+3]] + + S[1][x8[i+2]]) ^ + S[2][x8[i+1]]) + + S[3][x8[i]]); +}; - delete headers.Authorization - delete headers.authorization +Blowfish.prototype.encipher = function(x, x8) { + if (x8 === undefined) { + x8 = new Uint8Array(x.buffer); + if (x.byteOffset !== 0) + x8 = x8.subarray(x.byteOffset); } -} - -RequestSigner.prototype.sign = function() { - if (!this.parsedPath) this.prepareRequest() - - if (this.request.signQuery) { - this.parsedPath.query['X-Amz-Signature'] = this.signature() - } else { - this.request.headers.Authorization = this.authHeader() + x[0] ^= this.P[0]; + for (var i = 1; i < 16; i += 2) { + x[1] ^= F(this.S, x8, 0) ^ this.P[i]; + x[0] ^= F(this.S, x8, 4) ^ this.P[i+1]; } + var t = x[0]; + x[0] = x[1] ^ this.P[17]; + x[1] = t; +}; - this.request.path = this.formatPath() - - return this.request -} - -RequestSigner.prototype.getDateTime = function() { - if (!this.datetime) { - var headers = this.request.headers, - date = new Date(headers.Date || headers.date || new Date) - - this.datetime = date.toISOString().replace(/[:\-]|\.\d{3}/g, '') - - // Remove the trailing 'Z' on the timestamp string for CodeCommit git access - if (this.isCodeCommitGit) this.datetime = this.datetime.slice(0, -1) +Blowfish.prototype.decipher = function(x) { + var x8 = new Uint8Array(x.buffer); + if (x.byteOffset !== 0) + x8 = x8.subarray(x.byteOffset); + x[0] ^= this.P[17]; + for (var i = 16; i > 0; i -= 2) { + x[1] ^= F(this.S, x8, 0) ^ this.P[i]; + x[0] ^= F(this.S, x8, 4) ^ this.P[i-1]; } - return this.datetime -} - -RequestSigner.prototype.getDate = function() { - return this.getDateTime().substr(0, 8) -} - -RequestSigner.prototype.authHeader = function() { - return [ - 'AWS4-HMAC-SHA256 Credential=' + this.credentials.accessKeyId + '/' + this.credentialString(), - 'SignedHeaders=' + this.signedHeaders(), - 'Signature=' + this.signature(), - ].join(', ') -} + var t = x[0]; + x[0] = x[1] ^ this.P[0]; + x[1] = t; +}; -RequestSigner.prototype.signature = function() { - var date = this.getDate(), - cacheKey = [this.credentials.secretAccessKey, date, this.region, this.service].join(), - kDate, kRegion, kService, kCredentials = credentialsCache.get(cacheKey) - if (!kCredentials) { - kDate = hmac('AWS4' + this.credentials.secretAccessKey, date) - kRegion = hmac(kDate, this.region) - kService = hmac(kRegion, this.service) - 
kCredentials = hmac(kService, 'aws4_request') - credentialsCache.set(cacheKey, kCredentials) +function stream2word(data, databytes){ + var i, temp = 0; + for (i = 0; i < 4; i++, BLF_J++) { + if (BLF_J >= databytes) BLF_J = 0; + temp = (temp << 8) | data[BLF_J]; } - return hmac(kCredentials, this.stringToSign(), 'hex') -} - -RequestSigner.prototype.stringToSign = function() { - return [ - 'AWS4-HMAC-SHA256', - this.getDateTime(), - this.credentialString(), - hash(this.canonicalString(), 'hex'), - ].join('\n') -} - -RequestSigner.prototype.canonicalString = function() { - if (!this.parsedPath) this.prepareRequest() - - var pathStr = this.parsedPath.path, - query = this.parsedPath.query, - headers = this.request.headers, - queryStr = '', - normalizePath = this.service !== 's3', - decodePath = this.service === 's3' || this.request.doNotEncodePath, - decodeSlashesInPath = this.service === 's3', - firstValOnly = this.service === 's3', - bodyHash + return temp; +}; - if (this.service === 's3' && this.request.signQuery) { - bodyHash = 'UNSIGNED-PAYLOAD' - } else if (this.isCodeCommitGit) { - bodyHash = '' - } else { - bodyHash = headers['X-Amz-Content-Sha256'] || headers['x-amz-content-sha256'] || - hash(this.request.body || '', 'hex') - } +Blowfish.prototype.expand0state = function(key, keybytes) { + var d = new Uint32Array(2), i, k; + var d8 = new Uint8Array(d.buffer); - if (query) { - var reducedQuery = Object.keys(query).reduce(function(obj, key) { - if (!key) return obj - obj[encodeRfc3986Full(key)] = !Array.isArray(query[key]) ? query[key] : - (firstValOnly ? query[key][0] : query[key]) - return obj - }, {}) - var encodedQueryPieces = [] - Object.keys(reducedQuery).sort().forEach(function(key) { - if (!Array.isArray(reducedQuery[key])) { - encodedQueryPieces.push(key + '=' + encodeRfc3986Full(reducedQuery[key])) - } else { - reducedQuery[key].map(encodeRfc3986Full).sort() - .forEach(function(val) { encodedQueryPieces.push(key + '=' + val) }) - } - }) - queryStr = encodedQueryPieces.join('&') - } - if (pathStr !== '/') { - if (normalizePath) pathStr = pathStr.replace(/\/{2,}/g, '/') - pathStr = pathStr.split('/').reduce(function(path, piece) { - if (normalizePath && piece === '..') { - path.pop() - } else if (!normalizePath || piece !== '.') { - if (decodePath) piece = decodeURIComponent(piece.replace(/\+/g, ' ')) - path.push(encodeRfc3986Full(piece)) - } - return path - }, []).join('/') - if (pathStr[0] !== '/') pathStr = '/' + pathStr - if (decodeSlashesInPath) pathStr = pathStr.replace(/%2F/g, '/') + for (i = 0, BLF_J = 0; i < 18; i++) { + this.P[i] ^= stream2word(key, keybytes); } + BLF_J = 0; - return [ - this.request.method || 'GET', - pathStr, - queryStr, - this.canonicalHeaders() + '\n', - this.signedHeaders(), - bodyHash, - ].join('\n') -} - -RequestSigner.prototype.canonicalHeaders = function() { - var headers = this.request.headers - function trimAll(header) { - return header.toString().trim().replace(/\s+/g, ' ') + for (i = 0; i < 18; i += 2) { + this.encipher(d, d8); + this.P[i] = d[0]; + this.P[i+1] = d[1]; } - return Object.keys(headers) - .filter(function(key) { return HEADERS_TO_IGNORE[key.toLowerCase()] == null }) - .sort(function(a, b) { return a.toLowerCase() < b.toLowerCase() ? 
-1 : 1 }) - .map(function(key) { return key.toLowerCase() + ':' + trimAll(headers[key]) }) - .join('\n') -} - -RequestSigner.prototype.signedHeaders = function() { - return Object.keys(this.request.headers) - .map(function(key) { return key.toLowerCase() }) - .filter(function(key) { return HEADERS_TO_IGNORE[key] == null }) - .sort() - .join(';') -} - -RequestSigner.prototype.credentialString = function() { - return [ - this.getDate(), - this.region, - this.service, - 'aws4_request', - ].join('/') -} -RequestSigner.prototype.defaultCredentials = function() { - var env = process.env - return { - accessKeyId: env.AWS_ACCESS_KEY_ID || env.AWS_ACCESS_KEY, - secretAccessKey: env.AWS_SECRET_ACCESS_KEY || env.AWS_SECRET_KEY, - sessionToken: env.AWS_SESSION_TOKEN, + for (i = 0; i < 4; i++) { + for (k = 0; k < 256; k += 2) { + this.encipher(d, d8); + this.S[i][k] = d[0]; + this.S[i][k+1] = d[1]; + } } -} +}; -RequestSigner.prototype.parsePath = function() { - var path = this.request.path || '/' +Blowfish.prototype.expandstate = function(data, databytes, key, keybytes) { + var d = new Uint32Array(2), i, k; - // S3 doesn't always encode characters > 127 correctly and - // all services don't encode characters > 255 correctly - // So if there are non-reserved chars (and it's not already all % encoded), just encode them all - if (/[^0-9A-Za-z;,/?:@&=+$\-_.!~*'()#%]/.test(path)) { - path = encodeURI(decodeURI(path)) + for (i = 0, BLF_J = 0; i < 18; i++) { + this.P[i] ^= stream2word(key, keybytes); } - var queryIx = path.indexOf('?'), - query = null - - if (queryIx >= 0) { - query = querystring.parse(path.slice(queryIx + 1)) - path = path.slice(0, queryIx) + for (i = 0, BLF_J = 0; i < 18; i += 2) { + d[0] ^= stream2word(data, databytes); + d[1] ^= stream2word(data, databytes); + this.encipher(d); + this.P[i] = d[0]; + this.P[i+1] = d[1]; } - this.parsedPath = { - path: path, - query: query, + for (i = 0; i < 4; i++) { + for (k = 0; k < 256; k += 2) { + d[0] ^= stream2word(data, databytes); + d[1] ^= stream2word(data, databytes); + this.encipher(d); + this.S[i][k] = d[0]; + this.S[i][k+1] = d[1]; + } } -} - -RequestSigner.prototype.formatPath = function() { - var path = this.parsedPath.path, - query = this.parsedPath.query - - if (!query) return path - - // Services don't support empty query string keys - if (query[''] != null) delete query[''] - - return path + '?' 
+ encodeRfc3986(querystring.stringify(query)) -} - -aws4.RequestSigner = RequestSigner - -aws4.sign = function(request, credentials) { - return new RequestSigner(request, credentials).sign() -} - - -/***/ }), - -/***/ 74225: -/***/ ((module) => { - -module.exports = function(size) { - return new LruCache(size) -} - -function LruCache(size) { - this.capacity = size | 0 - this.map = Object.create(null) - this.list = new DoublyLinkedList() -} - -LruCache.prototype.get = function(key) { - var node = this.map[key] - if (node == null) return undefined - this.used(node) - return node.val -} + BLF_J = 0; +}; -LruCache.prototype.set = function(key, val) { - var node = this.map[key] - if (node != null) { - node.val = val - } else { - if (!this.capacity) this.prune() - if (!this.capacity) return false - node = new DoublyLinkedNode(key, val) - this.map[key] = node - this.capacity-- +Blowfish.prototype.enc = function(data, blocks) { + for (var i = 0; i < blocks; i++) { + this.encipher(data.subarray(i*2)); } - this.used(node) - return true -} - -LruCache.prototype.used = function(node) { - this.list.moveToFront(node) -} +}; -LruCache.prototype.prune = function() { - var node = this.list.pop() - if (node != null) { - delete this.map[node.key] - this.capacity++ +Blowfish.prototype.dec = function(data, blocks) { + for (var i = 0; i < blocks; i++) { + this.decipher(data.subarray(i*2)); } -} - - -function DoublyLinkedList() { - this.firstNode = null - this.lastNode = null -} +}; -DoublyLinkedList.prototype.moveToFront = function(node) { - if (this.firstNode == node) return +var BCRYPT_BLOCKS = 8, + BCRYPT_HASHSIZE = 32; - this.remove(node) +function bcrypt_hash(sha2pass, sha2salt, out) { + var state = new Blowfish(), + cdata = new Uint32Array(BCRYPT_BLOCKS), i, + ciphertext = new Uint8Array([79,120,121,99,104,114,111,109,97,116,105, + 99,66,108,111,119,102,105,115,104,83,119,97,116,68,121,110,97,109, + 105,116,101]); //"OxychromaticBlowfishSwatDynamite" - if (this.firstNode == null) { - this.firstNode = node - this.lastNode = node - node.prev = null - node.next = null - } else { - node.prev = null - node.next = this.firstNode - node.next.prev = node - this.firstNode = node + state.expandstate(sha2salt, 64, sha2pass, 64); + for (i = 0; i < 64; i++) { + state.expand0state(sha2salt, 64); + state.expand0state(sha2pass, 64); } -} -DoublyLinkedList.prototype.pop = function() { - var lastNode = this.lastNode - if (lastNode != null) { - this.remove(lastNode) - } - return lastNode -} + for (i = 0; i < BCRYPT_BLOCKS; i++) + cdata[i] = stream2word(ciphertext, ciphertext.byteLength); + for (i = 0; i < 64; i++) + state.enc(cdata, cdata.byteLength / 8); -DoublyLinkedList.prototype.remove = function(node) { - if (this.firstNode == node) { - this.firstNode = node.next - } else if (node.prev != null) { - node.prev.next = node.next - } - if (this.lastNode == node) { - this.lastNode = node.prev - } else if (node.next != null) { - node.next.prev = node.prev + for (i = 0; i < BCRYPT_BLOCKS; i++) { + out[4*i+3] = cdata[i] >>> 24; + out[4*i+2] = cdata[i] >>> 16; + out[4*i+1] = cdata[i] >>> 8; + out[4*i+0] = cdata[i]; } -} - - -function DoublyLinkedNode(key, val) { - this.key = key - this.val = val - this.prev = null - this.next = null -} - - -/***/ }), - -/***/ 96545: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +}; -module.exports = __nccwpck_require__(52618); +function bcrypt_pbkdf(pass, passlen, salt, saltlen, key, keylen, rounds) { + var sha2pass = new Uint8Array(64), + sha2salt = new Uint8Array(64), + 
out = new Uint8Array(BCRYPT_HASHSIZE), + tmpout = new Uint8Array(BCRYPT_HASHSIZE), + countsalt = new Uint8Array(saltlen+4), + i, j, amt, stride, dest, count, + origkeylen = keylen; -/***/ }), + if (rounds < 1) + return -1; + if (passlen === 0 || saltlen === 0 || keylen === 0 || + keylen > (out.byteLength * out.byteLength) || saltlen > (1<<20)) + return -1; -/***/ 68104: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + stride = Math.floor((keylen + out.byteLength - 1) / out.byteLength); + amt = Math.floor((keylen + stride - 1) / stride); -"use strict"; + for (i = 0; i < saltlen; i++) + countsalt[i] = salt[i]; + crypto_hash_sha512(sha2pass, pass, passlen); -var utils = __nccwpck_require__(20328); -var settle = __nccwpck_require__(13211); -var buildFullPath = __nccwpck_require__(41934); -var buildURL = __nccwpck_require__(30646); -var http = __nccwpck_require__(98605); -var https = __nccwpck_require__(57211); -var httpFollow = __nccwpck_require__(67707).http; -var httpsFollow = __nccwpck_require__(67707).https; -var url = __nccwpck_require__(78835); -var zlib = __nccwpck_require__(78761); -var pkg = __nccwpck_require__(20696); -var createError = __nccwpck_require__(15226); -var enhanceError = __nccwpck_require__(21516); + for (count = 1; keylen > 0; count++) { + countsalt[saltlen+0] = count >>> 24; + countsalt[saltlen+1] = count >>> 16; + countsalt[saltlen+2] = count >>> 8; + countsalt[saltlen+3] = count; -var isHttps = /https:?/; + crypto_hash_sha512(sha2salt, countsalt, saltlen + 4); + bcrypt_hash(sha2pass, sha2salt, tmpout); + for (i = out.byteLength; i--;) + out[i] = tmpout[i]; -/** - * - * @param {http.ClientRequestArgs} options - * @param {AxiosProxyConfig} proxy - * @param {string} location - */ -function setProxy(options, proxy, location) { - options.hostname = proxy.host; - options.host = proxy.host; - options.port = proxy.port; - options.path = location; + for (i = 1; i < rounds; i++) { + crypto_hash_sha512(sha2salt, tmpout, tmpout.byteLength); + bcrypt_hash(sha2pass, sha2salt, tmpout); + for (j = 0; j < out.byteLength; j++) + out[j] ^= tmpout[j]; + } - // Basic proxy authorization - if (proxy.auth) { - var base64 = Buffer.from(proxy.auth.username + ':' + proxy.auth.password, 'utf8').toString('base64'); - options.headers['Proxy-Authorization'] = 'Basic ' + base64; + amt = Math.min(amt, keylen); + for (i = 0; i < amt; i++) { + dest = i * stride + (count - 1); + if (dest >= origkeylen) + break; + key[dest] = out[i]; + } + keylen -= i; } - // If a proxy is used, any redirects must also pass through the proxy - options.beforeRedirect = function beforeRedirect(redirection) { - redirection.headers.host = redirection.host; - setProxy(redirection, proxy, redirection.href); - }; -} + return 0; +}; -/*eslint consistent-return:0*/ -module.exports = function httpAdapter(config) { - return new Promise(function dispatchHttpRequest(resolvePromise, rejectPromise) { - var resolve = function resolve(value) { - resolvePromise(value); - }; - var reject = function reject(value) { - rejectPromise(value); - }; - var data = config.data; - var headers = config.headers; +module.exports = { + BLOCKS: BCRYPT_BLOCKS, + HASHSIZE: BCRYPT_HASHSIZE, + hash: bcrypt_hash, + pbkdf: bcrypt_pbkdf +}; - // Set User-Agent (required by some servers) - // Only set header if it hasn't been set in config - // See https://github.com/axios/axios/issues/69 - if (!headers['User-Agent'] && !headers['user-agent']) { - headers['User-Agent'] = 'axios/' + pkg.version; - } - if (data && !utils.isStream(data)) { - 
if (Buffer.isBuffer(data)) { - // Nothing to do... - } else if (utils.isArrayBuffer(data)) { - data = Buffer.from(new Uint8Array(data)); - } else if (utils.isString(data)) { - data = Buffer.from(data, 'utf-8'); - } else { - return reject(createError( - 'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream', - config - )); - } +/***/ }), - // Add Content-Length header if data exists - headers['Content-Length'] = data.length; - } - - // HTTP basic authentication - var auth = undefined; - if (config.auth) { - var username = config.auth.username || ''; - var password = config.auth.password || ''; - auth = username + ':' + password; - } - - // Parse url - var fullPath = buildFullPath(config.baseURL, config.url); - var parsed = url.parse(fullPath); - var protocol = parsed.protocol || 'http:'; - - if (!auth && parsed.auth) { - var urlAuth = parsed.auth.split(':'); - var urlUsername = urlAuth[0] || ''; - var urlPassword = urlAuth[1] || ''; - auth = urlUsername + ':' + urlPassword; - } - - if (auth) { - delete headers.Authorization; - } - - var isHttpsRequest = isHttps.test(protocol); - var agent = isHttpsRequest ? config.httpsAgent : config.httpAgent; - - var options = { - path: buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, ''), - method: config.method.toUpperCase(), - headers: headers, - agent: agent, - agents: { http: config.httpAgent, https: config.httpsAgent }, - auth: auth - }; - - if (config.socketPath) { - options.socketPath = config.socketPath; - } else { - options.hostname = parsed.hostname; - options.port = parsed.port; - } - - var proxy = config.proxy; - if (!proxy && proxy !== false) { - var proxyEnv = protocol.slice(0, -1) + '_proxy'; - var proxyUrl = process.env[proxyEnv] || process.env[proxyEnv.toUpperCase()]; - if (proxyUrl) { - var parsedProxyUrl = url.parse(proxyUrl); - var noProxyEnv = process.env.no_proxy || process.env.NO_PROXY; - var shouldProxy = true; - - if (noProxyEnv) { - var noProxy = noProxyEnv.split(',').map(function trim(s) { - return s.trim(); - }); - - shouldProxy = !noProxy.some(function proxyMatch(proxyElement) { - if (!proxyElement) { - return false; - } - if (proxyElement === '*') { - return true; - } - if (proxyElement[0] === '.' && - parsed.hostname.substr(parsed.hostname.length - proxyElement.length) === proxyElement) { - return true; - } - - return parsed.hostname === proxyElement; - }); - } - - if (shouldProxy) { - proxy = { - host: parsedProxyUrl.hostname, - port: parsedProxyUrl.port, - protocol: parsedProxyUrl.protocol - }; - - if (parsedProxyUrl.auth) { - var proxyUrlAuth = parsedProxyUrl.auth.split(':'); - proxy.auth = { - username: proxyUrlAuth[0], - password: proxyUrlAuth[1] - }; - } - } - } - } - - if (proxy) { - options.headers.host = parsed.hostname + (parsed.port ? ':' + parsed.port : ''); - setProxy(options, proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path); - } - - var transport; - var isHttpsProxy = isHttpsRequest && (proxy ? isHttps.test(proxy.protocol) : true); - if (config.transport) { - transport = config.transport; - } else if (config.maxRedirects === 0) { - transport = isHttpsProxy ? https : http; - } else { - if (config.maxRedirects) { - options.maxRedirects = config.maxRedirects; - } - transport = isHttpsProxy ? 
httpsFollow : httpFollow; - } - - if (config.maxBodyLength > -1) { - options.maxBodyLength = config.maxBodyLength; - } - - // Create the request - var req = transport.request(options, function handleResponse(res) { - if (req.aborted) return; - - // uncompress the response body transparently if required - var stream = res; - - // return the last request in case of redirects - var lastRequest = res.req || req; - - - // if no content, is HEAD request or decompress disabled we should not decompress - if (res.statusCode !== 204 && lastRequest.method !== 'HEAD' && config.decompress !== false) { - switch (res.headers['content-encoding']) { - /*eslint default-case:0*/ - case 'gzip': - case 'compress': - case 'deflate': - // add the unzipper to the body stream processing pipeline - stream = stream.pipe(zlib.createUnzip()); - - // remove the content-encoding in order to not confuse downstream operations - delete res.headers['content-encoding']; - break; - } - } - - var response = { - status: res.statusCode, - statusText: res.statusMessage, - headers: res.headers, - config: config, - request: lastRequest - }; - - if (config.responseType === 'stream') { - response.data = stream; - settle(resolve, reject, response); - } else { - var responseBuffer = []; - stream.on('data', function handleStreamData(chunk) { - responseBuffer.push(chunk); - - // make sure the content length is not over the maxContentLength if specified - if (config.maxContentLength > -1 && Buffer.concat(responseBuffer).length > config.maxContentLength) { - stream.destroy(); - reject(createError('maxContentLength size of ' + config.maxContentLength + ' exceeded', - config, null, lastRequest)); - } - }); - - stream.on('error', function handleStreamError(err) { - if (req.aborted) return; - reject(enhanceError(err, config, null, lastRequest)); - }); - - stream.on('end', function handleStreamEnd() { - var responseData = Buffer.concat(responseBuffer); - if (config.responseType !== 'arraybuffer') { - responseData = responseData.toString(config.responseEncoding); - if (!config.responseEncoding || config.responseEncoding === 'utf8') { - responseData = utils.stripBOM(responseData); - } - } - - response.data = responseData; - settle(resolve, reject, response); - }); - } - }); - - // Handle errors - req.on('error', function handleRequestError(err) { - if (req.aborted && err.code !== 'ERR_FR_TOO_MANY_REDIRECTS') return; - reject(enhanceError(err, config, null, req)); - }); - - // Handle request timeout - if (config.timeout) { - // Sometime, the response will be very slow, and does not respond, the connect event will be block by event loop system. - // And timer callback will be fired, and abort() will be invoked before connection, then get "socket hang up" and code ECONNRESET. - // At this time, if we have a large number of request, nodejs will hang up some socket on background. and the number will up and up. - // And then these socket which be hang up will devoring CPU little by little. - // ClientRequest.setTimeout will be fired on the specify milliseconds, and can make sure that abort() will be fired after connect. 
- req.setTimeout(config.timeout, function handleRequestTimeout() { - req.abort(); - reject(createError('timeout of ' + config.timeout + 'ms exceeded', config, 'ECONNABORTED', req)); - }); - } - - if (config.cancelToken) { - // Handle cancellation - config.cancelToken.promise.then(function onCanceled(cancel) { - if (req.aborted) return; - - req.abort(); - reject(cancel); - }); - } - - // Send the request - if (utils.isStream(data)) { - data.on('error', function handleStreamError(err) { - reject(enhanceError(err, config, null, req)); - }).pipe(req); - } else { - req.end(data); - } - }); -}; - - -/***/ }), - -/***/ 3454: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); -var settle = __nccwpck_require__(13211); -var cookies = __nccwpck_require__(21545); -var buildURL = __nccwpck_require__(30646); -var buildFullPath = __nccwpck_require__(41934); -var parseHeaders = __nccwpck_require__(86455); -var isURLSameOrigin = __nccwpck_require__(33608); -var createError = __nccwpck_require__(15226); - -module.exports = function xhrAdapter(config) { - return new Promise(function dispatchXhrRequest(resolve, reject) { - var requestData = config.data; - var requestHeaders = config.headers; - - if (utils.isFormData(requestData)) { - delete requestHeaders['Content-Type']; // Let the browser set it - } - - var request = new XMLHttpRequest(); - - // HTTP basic authentication - if (config.auth) { - var username = config.auth.username || ''; - var password = config.auth.password ? unescape(encodeURIComponent(config.auth.password)) : ''; - requestHeaders.Authorization = 'Basic ' + btoa(username + ':' + password); - } - - var fullPath = buildFullPath(config.baseURL, config.url); - request.open(config.method.toUpperCase(), buildURL(fullPath, config.params, config.paramsSerializer), true); - - // Set the request timeout in MS - request.timeout = config.timeout; - - // Listen for ready state - request.onreadystatechange = function handleLoad() { - if (!request || request.readyState !== 4) { - return; - } - - // The request errored out and we didn't get a response, this will be - // handled by onerror instead - // With one exception: request that using file: protocol, most browsers - // will return status as 0 even though it's a successful request - if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) { - return; - } - - // Prepare the response - var responseHeaders = 'getAllResponseHeaders' in request ? parseHeaders(request.getAllResponseHeaders()) : null; - var responseData = !config.responseType || config.responseType === 'text' ? 
request.responseText : request.response; - var response = { - data: responseData, - status: request.status, - statusText: request.statusText, - headers: responseHeaders, - config: config, - request: request - }; - - settle(resolve, reject, response); - - // Clean up request - request = null; - }; - - // Handle browser request cancellation (as opposed to a manual cancellation) - request.onabort = function handleAbort() { - if (!request) { - return; - } - - reject(createError('Request aborted', config, 'ECONNABORTED', request)); - - // Clean up request - request = null; - }; - - // Handle low level network errors - request.onerror = function handleError() { - // Real errors are hidden from us by the browser - // onerror should only fire if it's a network error - reject(createError('Network Error', config, null, request)); - - // Clean up request - request = null; - }; - - // Handle timeout - request.ontimeout = function handleTimeout() { - var timeoutErrorMessage = 'timeout of ' + config.timeout + 'ms exceeded'; - if (config.timeoutErrorMessage) { - timeoutErrorMessage = config.timeoutErrorMessage; - } - reject(createError(timeoutErrorMessage, config, 'ECONNABORTED', - request)); - - // Clean up request - request = null; - }; - - // Add xsrf header - // This is only done if running in a standard browser environment. - // Specifically not if we're in a web worker, or react-native. - if (utils.isStandardBrowserEnv()) { - // Add xsrf header - var xsrfValue = (config.withCredentials || isURLSameOrigin(fullPath)) && config.xsrfCookieName ? - cookies.read(config.xsrfCookieName) : - undefined; - - if (xsrfValue) { - requestHeaders[config.xsrfHeaderName] = xsrfValue; - } - } - - // Add headers to the request - if ('setRequestHeader' in request) { - utils.forEach(requestHeaders, function setRequestHeader(val, key) { - if (typeof requestData === 'undefined' && key.toLowerCase() === 'content-type') { - // Remove Content-Type if data is undefined - delete requestHeaders[key]; - } else { - // Otherwise add header to the request - request.setRequestHeader(key, val); - } - }); - } - - // Add withCredentials to request if needed - if (!utils.isUndefined(config.withCredentials)) { - request.withCredentials = !!config.withCredentials; - } - - // Add responseType to request if needed - if (config.responseType) { - try { - request.responseType = config.responseType; - } catch (e) { - // Expected DOMException thrown by browsers not compatible XMLHttpRequest Level 2. - // But, this can be suppressed for 'json' type as it can be parsed by default 'transformResponse' function. 
- if (config.responseType !== 'json') { - throw e; - } - } - } - - // Handle progress if needed - if (typeof config.onDownloadProgress === 'function') { - request.addEventListener('progress', config.onDownloadProgress); - } - - // Not all browsers support upload events - if (typeof config.onUploadProgress === 'function' && request.upload) { - request.upload.addEventListener('progress', config.onUploadProgress); - } - - if (config.cancelToken) { - // Handle cancellation - config.cancelToken.promise.then(function onCanceled(cancel) { - if (!request) { - return; - } - - request.abort(); - reject(cancel); - // Clean up request - request = null; - }); - } - - if (!requestData) { - requestData = null; - } - - // Send the request - request.send(requestData); - }); -}; - - -/***/ }), - -/***/ 52618: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); -var bind = __nccwpck_require__(77065); -var Axios = __nccwpck_require__(98178); -var mergeConfig = __nccwpck_require__(74831); -var defaults = __nccwpck_require__(98190); - -/** - * Create an instance of Axios - * - * @param {Object} defaultConfig The default config for the instance - * @return {Axios} A new instance of Axios - */ -function createInstance(defaultConfig) { - var context = new Axios(defaultConfig); - var instance = bind(Axios.prototype.request, context); - - // Copy axios.prototype to instance - utils.extend(instance, Axios.prototype, context); - - // Copy context to instance - utils.extend(instance, context); - - return instance; -} - -// Create the default instance to be exported -var axios = createInstance(defaults); - -// Expose Axios class to allow class inheritance -axios.Axios = Axios; - -// Factory for creating new instances -axios.create = function create(instanceConfig) { - return createInstance(mergeConfig(axios.defaults, instanceConfig)); -}; - -// Expose Cancel & CancelToken -axios.Cancel = __nccwpck_require__(98875); -axios.CancelToken = __nccwpck_require__(71587); -axios.isCancel = __nccwpck_require__(64057); - -// Expose all/spread -axios.all = function all(promises) { - return Promise.all(promises); -}; -axios.spread = __nccwpck_require__(74850); - -// Expose isAxiosError -axios.isAxiosError = __nccwpck_require__(60650); - -module.exports = axios; - -// Allow use of default import syntax in TypeScript -module.exports.default = axios; - - -/***/ }), - -/***/ 98875: -/***/ ((module) => { - -"use strict"; - - -/** - * A `Cancel` is an object that is thrown when an operation is canceled. - * - * @class - * @param {string=} message The message. - */ -function Cancel(message) { - this.message = message; -} - -Cancel.prototype.toString = function toString() { - return 'Cancel' + (this.message ? ': ' + this.message : ''); -}; - -Cancel.prototype.__CANCEL__ = true; - -module.exports = Cancel; - - -/***/ }), - -/***/ 71587: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var Cancel = __nccwpck_require__(98875); - -/** - * A `CancelToken` is an object that can be used to request cancellation of an operation. - * - * @class - * @param {Function} executor The executor function. 
- */ -function CancelToken(executor) { - if (typeof executor !== 'function') { - throw new TypeError('executor must be a function.'); - } - - var resolvePromise; - this.promise = new Promise(function promiseExecutor(resolve) { - resolvePromise = resolve; - }); - - var token = this; - executor(function cancel(message) { - if (token.reason) { - // Cancellation has already been requested - return; - } - - token.reason = new Cancel(message); - resolvePromise(token.reason); - }); -} - -/** - * Throws a `Cancel` if cancellation has been requested. - */ -CancelToken.prototype.throwIfRequested = function throwIfRequested() { - if (this.reason) { - throw this.reason; - } -}; - -/** - * Returns an object that contains a new `CancelToken` and a function that, when called, - * cancels the `CancelToken`. - */ -CancelToken.source = function source() { - var cancel; - var token = new CancelToken(function executor(c) { - cancel = c; - }); - return { - token: token, - cancel: cancel - }; -}; - -module.exports = CancelToken; - - -/***/ }), - -/***/ 64057: -/***/ ((module) => { - -"use strict"; - - -module.exports = function isCancel(value) { - return !!(value && value.__CANCEL__); -}; - - -/***/ }), - -/***/ 98178: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); -var buildURL = __nccwpck_require__(30646); -var InterceptorManager = __nccwpck_require__(3214); -var dispatchRequest = __nccwpck_require__(85062); -var mergeConfig = __nccwpck_require__(74831); - -/** - * Create a new instance of Axios - * - * @param {Object} instanceConfig The default config for the instance - */ -function Axios(instanceConfig) { - this.defaults = instanceConfig; - this.interceptors = { - request: new InterceptorManager(), - response: new InterceptorManager() - }; -} - -/** - * Dispatch a request - * - * @param {Object} config The config specific for this request (merged with this.defaults) - */ -Axios.prototype.request = function request(config) { - /*eslint no-param-reassign:0*/ - // Allow for axios('example/url'[, config]) a la fetch API - if (typeof config === 'string') { - config = arguments[1] || {}; - config.url = arguments[0]; - } else { - config = config || {}; - } - - config = mergeConfig(this.defaults, config); - - // Set config.method - if (config.method) { - config.method = config.method.toLowerCase(); - } else if (this.defaults.method) { - config.method = this.defaults.method.toLowerCase(); - } else { - config.method = 'get'; - } - - // Hook up interceptors middleware - var chain = [dispatchRequest, undefined]; - var promise = Promise.resolve(config); - - this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) { - chain.unshift(interceptor.fulfilled, interceptor.rejected); - }); - - this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) { - chain.push(interceptor.fulfilled, interceptor.rejected); - }); - - while (chain.length) { - promise = promise.then(chain.shift(), chain.shift()); - } - - return promise; -}; - -Axios.prototype.getUri = function getUri(config) { - config = mergeConfig(this.defaults, config); - return buildURL(config.url, config.params, config.paramsSerializer).replace(/^\?/, ''); -}; - -// Provide aliases for supported request methods -utils.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) { - /*eslint func-names:0*/ - Axios.prototype[method] = function(url, config) { - return this.request(mergeConfig(config || {}, { - method: 
method, - url: url, - data: (config || {}).data - })); - }; -}); - -utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { - /*eslint func-names:0*/ - Axios.prototype[method] = function(url, data, config) { - return this.request(mergeConfig(config || {}, { - method: method, - url: url, - data: data - })); - }; -}); - -module.exports = Axios; - - -/***/ }), - -/***/ 3214: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -function InterceptorManager() { - this.handlers = []; -} - -/** - * Add a new interceptor to the stack - * - * @param {Function} fulfilled The function to handle `then` for a `Promise` - * @param {Function} rejected The function to handle `reject` for a `Promise` - * - * @return {Number} An ID used to remove interceptor later - */ -InterceptorManager.prototype.use = function use(fulfilled, rejected) { - this.handlers.push({ - fulfilled: fulfilled, - rejected: rejected - }); - return this.handlers.length - 1; -}; - -/** - * Remove an interceptor from the stack - * - * @param {Number} id The ID that was returned by `use` - */ -InterceptorManager.prototype.eject = function eject(id) { - if (this.handlers[id]) { - this.handlers[id] = null; - } -}; - -/** - * Iterate over all the registered interceptors - * - * This method is particularly useful for skipping over any - * interceptors that may have become `null` calling `eject`. - * - * @param {Function} fn The function to call for each interceptor - */ -InterceptorManager.prototype.forEach = function forEach(fn) { - utils.forEach(this.handlers, function forEachHandler(h) { - if (h !== null) { - fn(h); - } - }); -}; - -module.exports = InterceptorManager; - - -/***/ }), - -/***/ 41934: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var isAbsoluteURL = __nccwpck_require__(41301); -var combineURLs = __nccwpck_require__(57189); - -/** - * Creates a new URL by combining the baseURL with the requestedURL, - * only when the requestedURL is not already an absolute URL. - * If the requestURL is absolute, this function returns the requestedURL untouched. - * - * @param {string} baseURL The base URL - * @param {string} requestedURL Absolute or relative URL to combine - * @returns {string} The combined full path - */ -module.exports = function buildFullPath(baseURL, requestedURL) { - if (baseURL && !isAbsoluteURL(requestedURL)) { - return combineURLs(baseURL, requestedURL); - } - return requestedURL; -}; - - -/***/ }), - -/***/ 15226: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var enhanceError = __nccwpck_require__(21516); - -/** - * Create an Error with the specified message, config, error code, request and response. - * - * @param {string} message The error message. - * @param {Object} config The config. - * @param {string} [code] The error code (for example, 'ECONNABORTED'). - * @param {Object} [request] The request. - * @param {Object} [response] The response. - * @returns {Error} The created error. 
- */ -module.exports = function createError(message, config, code, request, response) { - var error = new Error(message); - return enhanceError(error, config, code, request, response); -}; - - -/***/ }), - -/***/ 85062: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); -var transformData = __nccwpck_require__(19812); -var isCancel = __nccwpck_require__(64057); -var defaults = __nccwpck_require__(98190); - -/** - * Throws a `Cancel` if cancellation has been requested. - */ -function throwIfCancellationRequested(config) { - if (config.cancelToken) { - config.cancelToken.throwIfRequested(); - } -} - -/** - * Dispatch a request to the server using the configured adapter. - * - * @param {object} config The config that is to be used for the request - * @returns {Promise} The Promise to be fulfilled - */ -module.exports = function dispatchRequest(config) { - throwIfCancellationRequested(config); - - // Ensure headers exist - config.headers = config.headers || {}; - - // Transform request data - config.data = transformData( - config.data, - config.headers, - config.transformRequest - ); - - // Flatten headers - config.headers = utils.merge( - config.headers.common || {}, - config.headers[config.method] || {}, - config.headers - ); - - utils.forEach( - ['delete', 'get', 'head', 'post', 'put', 'patch', 'common'], - function cleanHeaderConfig(method) { - delete config.headers[method]; - } - ); - - var adapter = config.adapter || defaults.adapter; - - return adapter(config).then(function onAdapterResolution(response) { - throwIfCancellationRequested(config); - - // Transform response data - response.data = transformData( - response.data, - response.headers, - config.transformResponse - ); - - return response; - }, function onAdapterRejection(reason) { - if (!isCancel(reason)) { - throwIfCancellationRequested(config); - - // Transform response data - if (reason && reason.response) { - reason.response.data = transformData( - reason.response.data, - reason.response.headers, - config.transformResponse - ); - } - } - - return Promise.reject(reason); - }); -}; - - -/***/ }), - -/***/ 21516: -/***/ ((module) => { - -"use strict"; - - -/** - * Update an Error with the specified config, error code, and response. - * - * @param {Error} error The error to update. - * @param {Object} config The config. - * @param {string} [code] The error code (for example, 'ECONNABORTED'). - * @param {Object} [request] The request. - * @param {Object} [response] The response. - * @returns {Error} The error. - */ -module.exports = function enhanceError(error, config, code, request, response) { - error.config = config; - if (code) { - error.code = code; - } - - error.request = request; - error.response = response; - error.isAxiosError = true; - - error.toJSON = function toJSON() { - return { - // Standard - message: this.message, - name: this.name, - // Microsoft - description: this.description, - number: this.number, - // Mozilla - fileName: this.fileName, - lineNumber: this.lineNumber, - columnNumber: this.columnNumber, - stack: this.stack, - // Axios - config: this.config, - code: this.code - }; - }; - return error; -}; - - -/***/ }), - -/***/ 74831: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -/** - * Config-specific merge-function which creates a new config-object - * by merging two configuration objects together. 
- * - * @param {Object} config1 - * @param {Object} config2 - * @returns {Object} New object resulting from merging config2 to config1 - */ -module.exports = function mergeConfig(config1, config2) { - // eslint-disable-next-line no-param-reassign - config2 = config2 || {}; - var config = {}; - - var valueFromConfig2Keys = ['url', 'method', 'data']; - var mergeDeepPropertiesKeys = ['headers', 'auth', 'proxy', 'params']; - var defaultToConfig2Keys = [ - 'baseURL', 'transformRequest', 'transformResponse', 'paramsSerializer', - 'timeout', 'timeoutMessage', 'withCredentials', 'adapter', 'responseType', 'xsrfCookieName', - 'xsrfHeaderName', 'onUploadProgress', 'onDownloadProgress', 'decompress', - 'maxContentLength', 'maxBodyLength', 'maxRedirects', 'transport', 'httpAgent', - 'httpsAgent', 'cancelToken', 'socketPath', 'responseEncoding' - ]; - var directMergeKeys = ['validateStatus']; - - function getMergedValue(target, source) { - if (utils.isPlainObject(target) && utils.isPlainObject(source)) { - return utils.merge(target, source); - } else if (utils.isPlainObject(source)) { - return utils.merge({}, source); - } else if (utils.isArray(source)) { - return source.slice(); - } - return source; - } - - function mergeDeepProperties(prop) { - if (!utils.isUndefined(config2[prop])) { - config[prop] = getMergedValue(config1[prop], config2[prop]); - } else if (!utils.isUndefined(config1[prop])) { - config[prop] = getMergedValue(undefined, config1[prop]); - } - } - - utils.forEach(valueFromConfig2Keys, function valueFromConfig2(prop) { - if (!utils.isUndefined(config2[prop])) { - config[prop] = getMergedValue(undefined, config2[prop]); - } - }); - - utils.forEach(mergeDeepPropertiesKeys, mergeDeepProperties); - - utils.forEach(defaultToConfig2Keys, function defaultToConfig2(prop) { - if (!utils.isUndefined(config2[prop])) { - config[prop] = getMergedValue(undefined, config2[prop]); - } else if (!utils.isUndefined(config1[prop])) { - config[prop] = getMergedValue(undefined, config1[prop]); - } - }); - - utils.forEach(directMergeKeys, function merge(prop) { - if (prop in config2) { - config[prop] = getMergedValue(config1[prop], config2[prop]); - } else if (prop in config1) { - config[prop] = getMergedValue(undefined, config1[prop]); - } - }); - - var axiosKeys = valueFromConfig2Keys - .concat(mergeDeepPropertiesKeys) - .concat(defaultToConfig2Keys) - .concat(directMergeKeys); - - var otherKeys = Object - .keys(config1) - .concat(Object.keys(config2)) - .filter(function filterAxiosKeys(key) { - return axiosKeys.indexOf(key) === -1; - }); - - utils.forEach(otherKeys, mergeDeepProperties); - - return config; -}; - - -/***/ }), - -/***/ 13211: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var createError = __nccwpck_require__(15226); - -/** - * Resolve or reject a Promise based on response status. - * - * @param {Function} resolve A function that resolves the promise. - * @param {Function} reject A function that rejects the promise. - * @param {object} response The response. 
- */ -module.exports = function settle(resolve, reject, response) { - var validateStatus = response.config.validateStatus; - if (!response.status || !validateStatus || validateStatus(response.status)) { - resolve(response); - } else { - reject(createError( - 'Request failed with status code ' + response.status, - response.config, - null, - response.request, - response - )); - } -}; - - -/***/ }), - -/***/ 19812: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -/** - * Transform the data for a request or a response - * - * @param {Object|String} data The data to be transformed - * @param {Array} headers The headers for the request or response - * @param {Array|Function} fns A single function or Array of functions - * @returns {*} The resulting transformed data - */ -module.exports = function transformData(data, headers, fns) { - /*eslint no-param-reassign:0*/ - utils.forEach(fns, function transform(fn) { - data = fn(data, headers); - }); - - return data; -}; - - -/***/ }), - -/***/ 98190: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); -var normalizeHeaderName = __nccwpck_require__(36240); - -var DEFAULT_CONTENT_TYPE = { - 'Content-Type': 'application/x-www-form-urlencoded' -}; - -function setContentTypeIfUnset(headers, value) { - if (!utils.isUndefined(headers) && utils.isUndefined(headers['Content-Type'])) { - headers['Content-Type'] = value; - } -} - -function getDefaultAdapter() { - var adapter; - if (typeof XMLHttpRequest !== 'undefined') { - // For browsers use XHR adapter - adapter = __nccwpck_require__(3454); - } else if (typeof process !== 'undefined' && Object.prototype.toString.call(process) === '[object process]') { - // For node use HTTP adapter - adapter = __nccwpck_require__(68104); - } - return adapter; -} - -var defaults = { - adapter: getDefaultAdapter(), - - transformRequest: [function transformRequest(data, headers) { - normalizeHeaderName(headers, 'Accept'); - normalizeHeaderName(headers, 'Content-Type'); - if (utils.isFormData(data) || - utils.isArrayBuffer(data) || - utils.isBuffer(data) || - utils.isStream(data) || - utils.isFile(data) || - utils.isBlob(data) - ) { - return data; - } - if (utils.isArrayBufferView(data)) { - return data.buffer; - } - if (utils.isURLSearchParams(data)) { - setContentTypeIfUnset(headers, 'application/x-www-form-urlencoded;charset=utf-8'); - return data.toString(); - } - if (utils.isObject(data)) { - setContentTypeIfUnset(headers, 'application/json;charset=utf-8'); - return JSON.stringify(data); - } - return data; - }], - - transformResponse: [function transformResponse(data) { - /*eslint no-param-reassign:0*/ - if (typeof data === 'string') { - try { - data = JSON.parse(data); - } catch (e) { /* Ignore */ } - } - return data; - }], - - /** - * A timeout in milliseconds to abort a request. If set to 0 (default) a - * timeout is not created. 
- */ - timeout: 0, - - xsrfCookieName: 'XSRF-TOKEN', - xsrfHeaderName: 'X-XSRF-TOKEN', - - maxContentLength: -1, - maxBodyLength: -1, - - validateStatus: function validateStatus(status) { - return status >= 200 && status < 300; - } -}; - -defaults.headers = { - common: { - 'Accept': 'application/json, text/plain, */*' - } -}; - -utils.forEach(['delete', 'get', 'head'], function forEachMethodNoData(method) { - defaults.headers[method] = {}; -}); - -utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { - defaults.headers[method] = utils.merge(DEFAULT_CONTENT_TYPE); -}); - -module.exports = defaults; - - -/***/ }), - -/***/ 77065: -/***/ ((module) => { - -"use strict"; - - -module.exports = function bind(fn, thisArg) { - return function wrap() { - var args = new Array(arguments.length); - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i]; - } - return fn.apply(thisArg, args); - }; -}; - - -/***/ }), - -/***/ 30646: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -function encode(val) { - return encodeURIComponent(val). - replace(/%3A/gi, ':'). - replace(/%24/g, '$'). - replace(/%2C/gi, ','). - replace(/%20/g, '+'). - replace(/%5B/gi, '['). - replace(/%5D/gi, ']'); -} - -/** - * Build a URL by appending params to the end - * - * @param {string} url The base of the url (e.g., http://www.google.com) - * @param {object} [params] The params to be appended - * @returns {string} The formatted url - */ -module.exports = function buildURL(url, params, paramsSerializer) { - /*eslint no-param-reassign:0*/ - if (!params) { - return url; - } - - var serializedParams; - if (paramsSerializer) { - serializedParams = paramsSerializer(params); - } else if (utils.isURLSearchParams(params)) { - serializedParams = params.toString(); - } else { - var parts = []; - - utils.forEach(params, function serialize(val, key) { - if (val === null || typeof val === 'undefined') { - return; - } - - if (utils.isArray(val)) { - key = key + '[]'; - } else { - val = [val]; - } - - utils.forEach(val, function parseValue(v) { - if (utils.isDate(v)) { - v = v.toISOString(); - } else if (utils.isObject(v)) { - v = JSON.stringify(v); - } - parts.push(encode(key) + '=' + encode(v)); - }); - }); - - serializedParams = parts.join('&'); - } - - if (serializedParams) { - var hashmarkIndex = url.indexOf('#'); - if (hashmarkIndex !== -1) { - url = url.slice(0, hashmarkIndex); - } - - url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams; - } - - return url; -}; - - -/***/ }), - -/***/ 57189: -/***/ ((module) => { - -"use strict"; - - -/** - * Creates a new URL by combining the specified URLs - * - * @param {string} baseURL The base URL - * @param {string} relativeURL The relative URL - * @returns {string} The combined URL - */ -module.exports = function combineURLs(baseURL, relativeURL) { - return relativeURL - ? baseURL.replace(/\/+$/, '') + '/' + relativeURL.replace(/^\/+/, '') - : baseURL; -}; - - -/***/ }), - -/***/ 21545: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -module.exports = ( - utils.isStandardBrowserEnv() ? 
- - // Standard browser envs support document.cookie - (function standardBrowserEnv() { - return { - write: function write(name, value, expires, path, domain, secure) { - var cookie = []; - cookie.push(name + '=' + encodeURIComponent(value)); - - if (utils.isNumber(expires)) { - cookie.push('expires=' + new Date(expires).toGMTString()); - } - - if (utils.isString(path)) { - cookie.push('path=' + path); - } - - if (utils.isString(domain)) { - cookie.push('domain=' + domain); - } - - if (secure === true) { - cookie.push('secure'); - } - - document.cookie = cookie.join('; '); - }, - - read: function read(name) { - var match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)')); - return (match ? decodeURIComponent(match[3]) : null); - }, - - remove: function remove(name) { - this.write(name, '', Date.now() - 86400000); - } - }; - })() : - - // Non standard browser env (web workers, react-native) lack needed support. - (function nonStandardBrowserEnv() { - return { - write: function write() {}, - read: function read() { return null; }, - remove: function remove() {} - }; - })() -); - - -/***/ }), - -/***/ 41301: -/***/ ((module) => { - -"use strict"; - - -/** - * Determines whether the specified URL is absolute - * - * @param {string} url The URL to test - * @returns {boolean} True if the specified URL is absolute, otherwise false - */ -module.exports = function isAbsoluteURL(url) { - // A URL is considered absolute if it begins with "://" or "//" (protocol-relative URL). - // RFC 3986 defines scheme name as a sequence of characters beginning with a letter and followed - // by any combination of letters, digits, plus, period, or hyphen. - return /^([a-z][a-z\d\+\-\.]*:)?\/\//i.test(url); -}; - - -/***/ }), - -/***/ 60650: -/***/ ((module) => { - -"use strict"; - - -/** - * Determines whether the payload is an error thrown by Axios - * - * @param {*} payload The value to test - * @returns {boolean} True if the payload is an error thrown by Axios, otherwise false - */ -module.exports = function isAxiosError(payload) { - return (typeof payload === 'object') && (payload.isAxiosError === true); -}; - - -/***/ }), - -/***/ 33608: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -module.exports = ( - utils.isStandardBrowserEnv() ? - - // Standard browser envs have full support of the APIs needed to test - // whether the request URL is of the same origin as current location. - (function standardBrowserEnv() { - var msie = /(msie|trident)/i.test(navigator.userAgent); - var urlParsingNode = document.createElement('a'); - var originURL; - - /** - * Parse a URL to discover it's components - * - * @param {String} url The URL to be parsed - * @returns {Object} - */ - function resolveURL(url) { - var href = url; - - if (msie) { - // IE needs attribute set twice to normalize properties - urlParsingNode.setAttribute('href', href); - href = urlParsingNode.href; - } - - urlParsingNode.setAttribute('href', href); - - // urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils - return { - href: urlParsingNode.href, - protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '', - host: urlParsingNode.host, - search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '', - hash: urlParsingNode.hash ? 
urlParsingNode.hash.replace(/^#/, '') : '', - hostname: urlParsingNode.hostname, - port: urlParsingNode.port, - pathname: (urlParsingNode.pathname.charAt(0) === '/') ? - urlParsingNode.pathname : - '/' + urlParsingNode.pathname - }; - } - - originURL = resolveURL(window.location.href); - - /** - * Determine if a URL shares the same origin as the current location - * - * @param {String} requestURL The URL to test - * @returns {boolean} True if URL shares the same origin, otherwise false - */ - return function isURLSameOrigin(requestURL) { - var parsed = (utils.isString(requestURL)) ? resolveURL(requestURL) : requestURL; - return (parsed.protocol === originURL.protocol && - parsed.host === originURL.host); - }; - })() : - - // Non standard browser envs (web workers, react-native) lack needed support. - (function nonStandardBrowserEnv() { - return function isURLSameOrigin() { - return true; - }; - })() -); - - -/***/ }), - -/***/ 36240: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -module.exports = function normalizeHeaderName(headers, normalizedName) { - utils.forEach(headers, function processHeader(value, name) { - if (name !== normalizedName && name.toUpperCase() === normalizedName.toUpperCase()) { - headers[normalizedName] = value; - delete headers[name]; - } - }); -}; - - -/***/ }), - -/***/ 86455: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -// Headers whose duplicates are ignored by node -// c.f. https://nodejs.org/api/http.html#http_message_headers -var ignoreDuplicateOf = [ - 'age', 'authorization', 'content-length', 'content-type', 'etag', - 'expires', 'from', 'host', 'if-modified-since', 'if-unmodified-since', - 'last-modified', 'location', 'max-forwards', 'proxy-authorization', - 'referer', 'retry-after', 'user-agent' -]; - -/** - * Parse headers into an object - * - * ``` - * Date: Wed, 27 Aug 2014 08:58:49 GMT - * Content-Type: application/json - * Connection: keep-alive - * Transfer-Encoding: chunked - * ``` - * - * @param {String} headers Headers needing to be parsed - * @returns {Object} Headers parsed into an object - */ -module.exports = function parseHeaders(headers) { - var parsed = {}; - var key; - var val; - var i; - - if (!headers) { return parsed; } - - utils.forEach(headers.split('\n'), function parser(line) { - i = line.indexOf(':'); - key = utils.trim(line.substr(0, i)).toLowerCase(); - val = utils.trim(line.substr(i + 1)); - - if (key) { - if (parsed[key] && ignoreDuplicateOf.indexOf(key) >= 0) { - return; - } - if (key === 'set-cookie') { - parsed[key] = (parsed[key] ? parsed[key] : []).concat([val]); - } else { - parsed[key] = parsed[key] ? parsed[key] + ', ' + val : val; - } - } - }); - - return parsed; -}; - - -/***/ }), - -/***/ 74850: -/***/ ((module) => { - -"use strict"; - - -/** - * Syntactic sugar for invoking a function and expanding an array for arguments. - * - * Common use case would be to use `Function.prototype.apply`. - * - * ```js - * function f(x, y, z) {} - * var args = [1, 2, 3]; - * f.apply(null, args); - * ``` - * - * With `spread` this example can be re-written. 
- * - * ```js - * spread(function(x, y, z) {})([1, 2, 3]); - * ``` - * - * @param {Function} callback - * @returns {Function} - */ -module.exports = function spread(callback) { - return function wrap(arr) { - return callback.apply(null, arr); - }; -}; - - -/***/ }), - -/***/ 20328: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var bind = __nccwpck_require__(77065); - -/*global toString:true*/ - -// utils is a library of generic helper functions non-specific to axios - -var toString = Object.prototype.toString; - -/** - * Determine if a value is an Array - * - * @param {Object} val The value to test - * @returns {boolean} True if value is an Array, otherwise false - */ -function isArray(val) { - return toString.call(val) === '[object Array]'; -} - -/** - * Determine if a value is undefined - * - * @param {Object} val The value to test - * @returns {boolean} True if the value is undefined, otherwise false - */ -function isUndefined(val) { - return typeof val === 'undefined'; -} - -/** - * Determine if a value is a Buffer - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Buffer, otherwise false - */ -function isBuffer(val) { - return val !== null && !isUndefined(val) && val.constructor !== null && !isUndefined(val.constructor) - && typeof val.constructor.isBuffer === 'function' && val.constructor.isBuffer(val); -} - -/** - * Determine if a value is an ArrayBuffer - * - * @param {Object} val The value to test - * @returns {boolean} True if value is an ArrayBuffer, otherwise false - */ -function isArrayBuffer(val) { - return toString.call(val) === '[object ArrayBuffer]'; -} - -/** - * Determine if a value is a FormData - * - * @param {Object} val The value to test - * @returns {boolean} True if value is an FormData, otherwise false - */ -function isFormData(val) { - return (typeof FormData !== 'undefined') && (val instanceof FormData); -} - -/** - * Determine if a value is a view on an ArrayBuffer - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a view on an ArrayBuffer, otherwise false - */ -function isArrayBufferView(val) { - var result; - if ((typeof ArrayBuffer !== 'undefined') && (ArrayBuffer.isView)) { - result = ArrayBuffer.isView(val); - } else { - result = (val) && (val.buffer) && (val.buffer instanceof ArrayBuffer); - } - return result; -} - -/** - * Determine if a value is a String - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a String, otherwise false - */ -function isString(val) { - return typeof val === 'string'; -} - -/** - * Determine if a value is a Number - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Number, otherwise false - */ -function isNumber(val) { - return typeof val === 'number'; -} - -/** - * Determine if a value is an Object - * - * @param {Object} val The value to test - * @returns {boolean} True if value is an Object, otherwise false - */ -function isObject(val) { - return val !== null && typeof val === 'object'; -} - -/** - * Determine if a value is a plain Object - * - * @param {Object} val The value to test - * @return {boolean} True if value is a plain Object, otherwise false - */ -function isPlainObject(val) { - if (toString.call(val) !== '[object Object]') { - return false; - } - - var prototype = Object.getPrototypeOf(val); - return prototype === null || prototype === Object.prototype; -} - -/** - * Determine if a value is a Date - * - * @param {Object} val 
The value to test - * @returns {boolean} True if value is a Date, otherwise false - */ -function isDate(val) { - return toString.call(val) === '[object Date]'; -} - -/** - * Determine if a value is a File - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a File, otherwise false - */ -function isFile(val) { - return toString.call(val) === '[object File]'; -} - -/** - * Determine if a value is a Blob - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Blob, otherwise false - */ -function isBlob(val) { - return toString.call(val) === '[object Blob]'; -} - -/** - * Determine if a value is a Function - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Function, otherwise false - */ -function isFunction(val) { - return toString.call(val) === '[object Function]'; -} - -/** - * Determine if a value is a Stream - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Stream, otherwise false - */ -function isStream(val) { - return isObject(val) && isFunction(val.pipe); -} - -/** - * Determine if a value is a URLSearchParams object - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a URLSearchParams object, otherwise false - */ -function isURLSearchParams(val) { - return typeof URLSearchParams !== 'undefined' && val instanceof URLSearchParams; -} - -/** - * Trim excess whitespace off the beginning and end of a string - * - * @param {String} str The String to trim - * @returns {String} The String freed of excess whitespace - */ -function trim(str) { - return str.replace(/^\s*/, '').replace(/\s*$/, ''); -} - -/** - * Determine if we're running in a standard browser environment - * - * This allows axios to run in a web worker, and react-native. - * Both environments support XMLHttpRequest, but not fully standard globals. - * - * web workers: - * typeof window -> undefined - * typeof document -> undefined - * - * react-native: - * navigator.product -> 'ReactNative' - * nativescript - * navigator.product -> 'NativeScript' or 'NS' - */ -function isStandardBrowserEnv() { - if (typeof navigator !== 'undefined' && (navigator.product === 'ReactNative' || - navigator.product === 'NativeScript' || - navigator.product === 'NS')) { - return false; - } - return ( - typeof window !== 'undefined' && - typeof document !== 'undefined' - ); -} - -/** - * Iterate over an Array or an Object invoking a function for each item. - * - * If `obj` is an Array callback will be called passing - * the value, index, and complete array for each item. - * - * If 'obj' is an Object callback will be called passing - * the value, key, and complete object for each property. 
- * - * @param {Object|Array} obj The object to iterate - * @param {Function} fn The callback to invoke for each item - */ -function forEach(obj, fn) { - // Don't bother if no value provided - if (obj === null || typeof obj === 'undefined') { - return; - } - - // Force an array if not already something iterable - if (typeof obj !== 'object') { - /*eslint no-param-reassign:0*/ - obj = [obj]; - } - - if (isArray(obj)) { - // Iterate over array values - for (var i = 0, l = obj.length; i < l; i++) { - fn.call(null, obj[i], i, obj); - } - } else { - // Iterate over object keys - for (var key in obj) { - if (Object.prototype.hasOwnProperty.call(obj, key)) { - fn.call(null, obj[key], key, obj); - } - } - } -} - -/** - * Accepts varargs expecting each argument to be an object, then - * immutably merges the properties of each object and returns result. - * - * When multiple objects contain the same key the later object in - * the arguments list will take precedence. - * - * Example: - * - * ```js - * var result = merge({foo: 123}, {foo: 456}); - * console.log(result.foo); // outputs 456 - * ``` - * - * @param {Object} obj1 Object to merge - * @returns {Object} Result of all merge properties - */ -function merge(/* obj1, obj2, obj3, ... */) { - var result = {}; - function assignValue(val, key) { - if (isPlainObject(result[key]) && isPlainObject(val)) { - result[key] = merge(result[key], val); - } else if (isPlainObject(val)) { - result[key] = merge({}, val); - } else if (isArray(val)) { - result[key] = val.slice(); - } else { - result[key] = val; - } - } - - for (var i = 0, l = arguments.length; i < l; i++) { - forEach(arguments[i], assignValue); - } - return result; -} - -/** - * Extends object a by mutably adding to it the properties of object b. - * - * @param {Object} a The object to be extended - * @param {Object} b The object to copy properties from - * @param {Object} thisArg The object to bind function to - * @return {Object} The resulting value of object a - */ -function extend(a, b, thisArg) { - forEach(b, function assignValue(val, key) { - if (thisArg && typeof val === 'function') { - a[key] = bind(val, thisArg); - } else { - a[key] = val; - } - }); - return a; -} - -/** - * Remove byte order marker. 
This catches EF BB BF (the UTF-8 BOM) - * - * @param {string} content with BOM - * @return {string} content value without BOM - */ -function stripBOM(content) { - if (content.charCodeAt(0) === 0xFEFF) { - content = content.slice(1); - } - return content; -} - -module.exports = { - isArray: isArray, - isArrayBuffer: isArrayBuffer, - isBuffer: isBuffer, - isFormData: isFormData, - isArrayBufferView: isArrayBufferView, - isString: isString, - isNumber: isNumber, - isObject: isObject, - isPlainObject: isPlainObject, - isUndefined: isUndefined, - isDate: isDate, - isFile: isFile, - isBlob: isBlob, - isFunction: isFunction, - isStream: isStream, - isURLSearchParams: isURLSearchParams, - isStandardBrowserEnv: isStandardBrowserEnv, - forEach: forEach, - merge: merge, - extend: extend, - trim: trim, - stripBOM: stripBOM -}; - - -/***/ }), - -/***/ 9417: -/***/ ((module) => { - -"use strict"; - -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); - - var r = range(a, b, str); - - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; -} - -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? m[0] : null; -} - -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; - - if (ai >= 0 && bi > 0) { - if(a===b) { - return [ai, bi]; - } - begs = []; - left = str.length; - - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } - - bi = str.indexOf(b, i + 1); - } - - i = ai < bi && ai >= 0 ? ai : bi; - } - - if (begs.length) { - result = [ left, right ]; - } - } - - return result; -} - - -/***/ }), - -/***/ 45447: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var crypto_hash_sha512 = __nccwpck_require__(68729).lowlevel.crypto_hash; - -/* - * This file is a 1:1 port from the OpenBSD blowfish.c and bcrypt_pbkdf.c. As a - * result, it retains the original copyright and license. The two files are - * under slightly different (but compatible) licenses, and are here combined in - * one file. - * - * Credit for the actual porting work goes to: - * Devi Mandiri - */ - -/* - * The Blowfish portions are under the following license: - * - * Blowfish block cipher for OpenBSD - * Copyright 1997 Niels Provos - * All rights reserved. - * - * Implementation advice by David Mazieres . - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. 
- * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR - * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. - * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF - * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -/* - * The bcrypt_pbkdf portions are under the following license: - * - * Copyright (c) 2013 Ted Unangst - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -/* - * Performance improvements (Javascript-specific): - * - * Copyright 2016, Joyent Inc - * Author: Alex Wilson - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- */ - -// Ported from OpenBSD bcrypt_pbkdf.c v1.9 - -var BLF_J = 0; - -var Blowfish = function() { - this.S = [ - new Uint32Array([ - 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, - 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, - 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, - 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, - 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, - 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, - 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, - 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, - 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, - 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, - 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, - 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, - 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, - 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, - 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, - 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, - 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, - 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, - 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, - 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, - 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, - 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, - 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, - 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, - 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, - 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, - 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, - 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, - 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, - 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, - 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, - 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, - 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, - 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, - 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, - 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, - 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, - 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, - 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, - 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, - 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, - 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, - 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, - 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, - 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, - 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, - 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, - 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, - 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, - 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, - 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, - 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, - 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, - 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, - 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, - 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, - 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, - 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, - 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, - 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, - 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, - 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, - 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, - 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a]), - new Uint32Array([ - 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, - 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, - 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, - 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 
- 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, - 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, - 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, - 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, - 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, - 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, - 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, - 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, - 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, - 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, - 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, - 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, - 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, - 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, - 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, - 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, - 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, - 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, - 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, - 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, - 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, - 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, - 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, - 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, - 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, - 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, - 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, - 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, - 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, - 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, - 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, - 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, - 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, - 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, - 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, - 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, - 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, - 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, - 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, - 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, - 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, - 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, - 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, - 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, - 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, - 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, - 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, - 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, - 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, - 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, - 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, - 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, - 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, - 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, - 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, - 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, - 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, - 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, - 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, - 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7]), - new Uint32Array([ - 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, - 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, - 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, - 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, - 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, - 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, - 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, - 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, - 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, - 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, - 0xaace1e7c, 0xd3375fec, 
0xce78a399, 0x406b2a42, - 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, - 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, - 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, - 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, - 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, - 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, - 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, - 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, - 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, - 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, - 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, - 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, - 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, - 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, - 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, - 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, - 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, - 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, - 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, - 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, - 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, - 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, - 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, - 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, - 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, - 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, - 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, - 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, - 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, - 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, - 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, - 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, - 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, - 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, - 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, - 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, - 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, - 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, - 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, - 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, - 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, - 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, - 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, - 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, - 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, - 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, - 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, - 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, - 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, - 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, - 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, - 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, - 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0]), - new Uint32Array([ - 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, - 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, - 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, - 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, - 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, - 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, - 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, - 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, - 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, - 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, - 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, - 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, - 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, - 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, - 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, - 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, - 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, - 
0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, - 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, - 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, - 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, - 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, - 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, - 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, - 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, - 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, - 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, - 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, - 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, - 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, - 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, - 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, - 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, - 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, - 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, - 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, - 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, - 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, - 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, - 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, - 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, - 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, - 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, - 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, - 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, - 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, - 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, - 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, - 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, - 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, - 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, - 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, - 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, - 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, - 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, - 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, - 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, - 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, - 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, - 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, - 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, - 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, - 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, - 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6]) - ]; - this.P = new Uint32Array([ - 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, - 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, - 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, - 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, - 0x9216d5d9, 0x8979fb1b]); -}; - -function F(S, x8, i) { - return (((S[0][x8[i+3]] + - S[1][x8[i+2]]) ^ - S[2][x8[i+1]]) + - S[3][x8[i]]); -}; - -Blowfish.prototype.encipher = function(x, x8) { - if (x8 === undefined) { - x8 = new Uint8Array(x.buffer); - if (x.byteOffset !== 0) - x8 = x8.subarray(x.byteOffset); - } - x[0] ^= this.P[0]; - for (var i = 1; i < 16; i += 2) { - x[1] ^= F(this.S, x8, 0) ^ this.P[i]; - x[0] ^= F(this.S, x8, 4) ^ this.P[i+1]; - } - var t = x[0]; - x[0] = x[1] ^ this.P[17]; - x[1] = t; -}; - -Blowfish.prototype.decipher = function(x) { - var x8 = new Uint8Array(x.buffer); - if (x.byteOffset !== 0) - x8 = x8.subarray(x.byteOffset); - x[0] ^= this.P[17]; - for (var i = 16; i > 0; i -= 2) { - x[1] ^= F(this.S, x8, 0) ^ this.P[i]; - x[0] ^= F(this.S, x8, 4) ^ this.P[i-1]; - } - var t = x[0]; - x[0] = x[1] ^ this.P[0]; - x[1] = t; -}; - -function stream2word(data, databytes){ - var i, temp = 0; - for (i = 0; i < 4; i++, BLF_J++) { - if 
(BLF_J >= databytes) BLF_J = 0; - temp = (temp << 8) | data[BLF_J]; - } - return temp; -}; - -Blowfish.prototype.expand0state = function(key, keybytes) { - var d = new Uint32Array(2), i, k; - var d8 = new Uint8Array(d.buffer); - - for (i = 0, BLF_J = 0; i < 18; i++) { - this.P[i] ^= stream2word(key, keybytes); - } - BLF_J = 0; - - for (i = 0; i < 18; i += 2) { - this.encipher(d, d8); - this.P[i] = d[0]; - this.P[i+1] = d[1]; - } - - for (i = 0; i < 4; i++) { - for (k = 0; k < 256; k += 2) { - this.encipher(d, d8); - this.S[i][k] = d[0]; - this.S[i][k+1] = d[1]; - } - } -}; - -Blowfish.prototype.expandstate = function(data, databytes, key, keybytes) { - var d = new Uint32Array(2), i, k; - - for (i = 0, BLF_J = 0; i < 18; i++) { - this.P[i] ^= stream2word(key, keybytes); - } - - for (i = 0, BLF_J = 0; i < 18; i += 2) { - d[0] ^= stream2word(data, databytes); - d[1] ^= stream2word(data, databytes); - this.encipher(d); - this.P[i] = d[0]; - this.P[i+1] = d[1]; - } - - for (i = 0; i < 4; i++) { - for (k = 0; k < 256; k += 2) { - d[0] ^= stream2word(data, databytes); - d[1] ^= stream2word(data, databytes); - this.encipher(d); - this.S[i][k] = d[0]; - this.S[i][k+1] = d[1]; - } - } - BLF_J = 0; -}; - -Blowfish.prototype.enc = function(data, blocks) { - for (var i = 0; i < blocks; i++) { - this.encipher(data.subarray(i*2)); - } -}; - -Blowfish.prototype.dec = function(data, blocks) { - for (var i = 0; i < blocks; i++) { - this.decipher(data.subarray(i*2)); - } -}; - -var BCRYPT_BLOCKS = 8, - BCRYPT_HASHSIZE = 32; - -function bcrypt_hash(sha2pass, sha2salt, out) { - var state = new Blowfish(), - cdata = new Uint32Array(BCRYPT_BLOCKS), i, - ciphertext = new Uint8Array([79,120,121,99,104,114,111,109,97,116,105, - 99,66,108,111,119,102,105,115,104,83,119,97,116,68,121,110,97,109, - 105,116,101]); //"OxychromaticBlowfishSwatDynamite" - - state.expandstate(sha2salt, 64, sha2pass, 64); - for (i = 0; i < 64; i++) { - state.expand0state(sha2salt, 64); - state.expand0state(sha2pass, 64); - } - - for (i = 0; i < BCRYPT_BLOCKS; i++) - cdata[i] = stream2word(ciphertext, ciphertext.byteLength); - for (i = 0; i < 64; i++) - state.enc(cdata, cdata.byteLength / 8); - - for (i = 0; i < BCRYPT_BLOCKS; i++) { - out[4*i+3] = cdata[i] >>> 24; - out[4*i+2] = cdata[i] >>> 16; - out[4*i+1] = cdata[i] >>> 8; - out[4*i+0] = cdata[i]; - } -}; - -function bcrypt_pbkdf(pass, passlen, salt, saltlen, key, keylen, rounds) { - var sha2pass = new Uint8Array(64), - sha2salt = new Uint8Array(64), - out = new Uint8Array(BCRYPT_HASHSIZE), - tmpout = new Uint8Array(BCRYPT_HASHSIZE), - countsalt = new Uint8Array(saltlen+4), - i, j, amt, stride, dest, count, - origkeylen = keylen; - - if (rounds < 1) - return -1; - if (passlen === 0 || saltlen === 0 || keylen === 0 || - keylen > (out.byteLength * out.byteLength) || saltlen > (1<<20)) - return -1; - - stride = Math.floor((keylen + out.byteLength - 1) / out.byteLength); - amt = Math.floor((keylen + stride - 1) / stride); - - for (i = 0; i < saltlen; i++) - countsalt[i] = salt[i]; - - crypto_hash_sha512(sha2pass, pass, passlen); - - for (count = 1; keylen > 0; count++) { - countsalt[saltlen+0] = count >>> 24; - countsalt[saltlen+1] = count >>> 16; - countsalt[saltlen+2] = count >>> 8; - countsalt[saltlen+3] = count; - - crypto_hash_sha512(sha2salt, countsalt, saltlen + 4); - bcrypt_hash(sha2pass, sha2salt, tmpout); - for (i = out.byteLength; i--;) - out[i] = tmpout[i]; - - for (i = 1; i < rounds; i++) { - crypto_hash_sha512(sha2salt, tmpout, tmpout.byteLength); - bcrypt_hash(sha2pass, 
sha2salt, tmpout); - for (j = 0; j < out.byteLength; j++) - out[j] ^= tmpout[j]; - } - - amt = Math.min(amt, keylen); - for (i = 0; i < amt; i++) { - dest = i * stride + (count - 1); - if (dest >= origkeylen) - break; - key[dest] = out[i]; - } - keylen -= i; - } - - return 0; -}; - -module.exports = { - BLOCKS: BCRYPT_BLOCKS, - HASHSIZE: BCRYPT_HASHSIZE, - hash: bcrypt_hash, - pbkdf: bcrypt_pbkdf -}; - - -/***/ }), - -/***/ 87558: -/***/ (function(module) { +/***/ 87558: +/***/ (function(module) { ;(function (globalObject) { 'use strict'; @@ -36147,401 +34841,3654 @@ module.exports = { /***/ }), -/***/ 20336: +/***/ 20336: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var DuplexStream = __nccwpck_require__(85519).Duplex + , util = __nccwpck_require__(31669) + +function BufferList (callback) { + if (!(this instanceof BufferList)) + return new BufferList(callback) + + this._bufs = [] + this.length = 0 + + if (typeof callback == 'function') { + this._callback = callback + + var piper = function piper (err) { + if (this._callback) { + this._callback(err) + this._callback = null + } + }.bind(this) + + this.on('pipe', function onPipe (src) { + src.on('error', piper) + }) + this.on('unpipe', function onUnpipe (src) { + src.removeListener('error', piper) + }) + } else { + this.append(callback) + } + + DuplexStream.call(this) +} + + +util.inherits(BufferList, DuplexStream) + + +BufferList.prototype._offset = function _offset (offset) { + var tot = 0, i = 0, _t + if (offset === 0) return [ 0, 0 ] + for (; i < this._bufs.length; i++) { + _t = tot + this._bufs[i].length + if (offset < _t || i == this._bufs.length - 1) { + return [ i, offset - tot ] + } + tot = _t + } +} + +BufferList.prototype._reverseOffset = function (blOffset) { + var bufferId = blOffset[0] + var offset = blOffset[1] + for (var i = 0; i < bufferId; i++) { + offset += this._bufs[i].length + } + return offset +} + +BufferList.prototype.append = function append (buf) { + var i = 0 + + if (Buffer.isBuffer(buf)) { + this._appendBuffer(buf) + } else if (Array.isArray(buf)) { + for (; i < buf.length; i++) + this.append(buf[i]) + } else if (buf instanceof BufferList) { + // unwrap argument into individual BufferLists + for (; i < buf._bufs.length; i++) + this.append(buf._bufs[i]) + } else if (buf != null) { + // coerce number arguments to strings, since Buffer(number) does + // uninitialized memory allocation + if (typeof buf == 'number') + buf = buf.toString() + + this._appendBuffer(Buffer.from(buf)) + } + + return this +} + + +BufferList.prototype._appendBuffer = function appendBuffer (buf) { + this._bufs.push(buf) + this.length += buf.length +} + + +BufferList.prototype._write = function _write (buf, encoding, callback) { + this._appendBuffer(buf) + + if (typeof callback == 'function') + callback() +} + + +BufferList.prototype._read = function _read (size) { + if (!this.length) + return this.push(null) + + size = Math.min(size, this.length) + this.push(this.slice(0, size)) + this.consume(size) +} + + +BufferList.prototype.end = function end (chunk) { + DuplexStream.prototype.end.call(this, chunk) + + if (this._callback) { + this._callback(null, this.slice()) + this._callback = null + } +} + + +BufferList.prototype.get = function get (index) { + if (index > this.length || index < 0) { + return undefined + } + var offset = this._offset(index) + return this._bufs[offset[0]][offset[1]] +} + + +BufferList.prototype.slice = function slice (start, end) { + if (typeof start == 'number' && start < 0) + 
start += this.length + if (typeof end == 'number' && end < 0) + end += this.length + return this.copy(null, 0, start, end) +} + + +BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { + if (typeof srcStart != 'number' || srcStart < 0) + srcStart = 0 + if (typeof srcEnd != 'number' || srcEnd > this.length) + srcEnd = this.length + if (srcStart >= this.length) + return dst || Buffer.alloc(0) + if (srcEnd <= 0) + return dst || Buffer.alloc(0) + + var copy = !!dst + , off = this._offset(srcStart) + , len = srcEnd - srcStart + , bytes = len + , bufoff = (copy && dstStart) || 0 + , start = off[1] + , l + , i + + // copy/slice everything + if (srcStart === 0 && srcEnd == this.length) { + if (!copy) { // slice, but full concat if multiple buffers + return this._bufs.length === 1 + ? this._bufs[0] + : Buffer.concat(this._bufs, this.length) + } + + // copy, need to copy individual buffers + for (i = 0; i < this._bufs.length; i++) { + this._bufs[i].copy(dst, bufoff) + bufoff += this._bufs[i].length + } + + return dst + } + + // easy, cheap case where it's a subset of one of the buffers + if (bytes <= this._bufs[off[0]].length - start) { + return copy + ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) + : this._bufs[off[0]].slice(start, start + bytes) + } + + if (!copy) // a slice, we need something to copy in to + dst = Buffer.allocUnsafe(len) + + for (i = off[0]; i < this._bufs.length; i++) { + l = this._bufs[i].length - start + + if (bytes > l) { + this._bufs[i].copy(dst, bufoff, start) + bufoff += l + } else { + this._bufs[i].copy(dst, bufoff, start, start + bytes) + bufoff += l + break + } + + bytes -= l + + if (start) + start = 0 + } + + // safeguard so that we don't return uninitialized memory + if (dst.length > bufoff) return dst.slice(0, bufoff) + + return dst +} + +BufferList.prototype.shallowSlice = function shallowSlice (start, end) { + start = start || 0 + end = typeof end !== 'number' ? 
this.length : end + + if (start < 0) + start += this.length + if (end < 0) + end += this.length + + if (start === end) { + return new BufferList() + } + var startOffset = this._offset(start) + , endOffset = this._offset(end) + , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1) + + if (endOffset[1] == 0) + buffers.pop() + else + buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1]) + + if (startOffset[1] != 0) + buffers[0] = buffers[0].slice(startOffset[1]) + + return new BufferList(buffers) +} + +BufferList.prototype.toString = function toString (encoding, start, end) { + return this.slice(start, end).toString(encoding) +} + +BufferList.prototype.consume = function consume (bytes) { + // first, normalize the argument, in accordance with how Buffer does it + bytes = Math.trunc(bytes) + // do nothing if not a positive number + if (Number.isNaN(bytes) || bytes <= 0) return this + + while (this._bufs.length) { + if (bytes >= this._bufs[0].length) { + bytes -= this._bufs[0].length + this.length -= this._bufs[0].length + this._bufs.shift() + } else { + this._bufs[0] = this._bufs[0].slice(bytes) + this.length -= bytes + break + } + } + return this +} + + +BufferList.prototype.duplicate = function duplicate () { + var i = 0 + , copy = new BufferList() + + for (; i < this._bufs.length; i++) + copy.append(this._bufs[i]) + + return copy +} + + +BufferList.prototype._destroy = function _destroy (err, cb) { + this._bufs.length = 0 + this.length = 0 + cb(err) +} + + +BufferList.prototype.indexOf = function (search, offset, encoding) { + if (encoding === undefined && typeof offset === 'string') { + encoding = offset + offset = undefined + } + if (typeof search === 'function' || Array.isArray(search)) { + throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.') + } else if (typeof search === 'number') { + search = Buffer.from([search]) + } else if (typeof search === 'string') { + search = Buffer.from(search, encoding) + } else if (search instanceof BufferList) { + search = search.slice() + } else if (!Buffer.isBuffer(search)) { + search = Buffer.from(search) + } + + offset = Number(offset || 0) + if (isNaN(offset)) { + offset = 0 + } + + if (offset < 0) { + offset = this.length + offset + } + + if (offset < 0) { + offset = 0 + } + + if (search.length === 0) { + return offset > this.length ? 
this.length : offset + } + + var blOffset = this._offset(offset) + var blIndex = blOffset[0] // index of which internal buffer we're working on + var buffOffset = blOffset[1] // offset of the internal buffer we're working on + + // scan over each buffer + for (blIndex; blIndex < this._bufs.length; blIndex++) { + var buff = this._bufs[blIndex] + while(buffOffset < buff.length) { + var availableWindow = buff.length - buffOffset + if (availableWindow >= search.length) { + var nativeSearchResult = buff.indexOf(search, buffOffset) + if (nativeSearchResult !== -1) { + return this._reverseOffset([blIndex, nativeSearchResult]) + } + buffOffset = buff.length - search.length + 1 // end of native search window + } else { + var revOffset = this._reverseOffset([blIndex, buffOffset]) + if (this._match(revOffset, search)) { + return revOffset + } + buffOffset++ + } + } + buffOffset = 0 + } + return -1 +} + +BufferList.prototype._match = function(offset, search) { + if (this.length - offset < search.length) { + return false + } + for (var searchOffset = 0; searchOffset < search.length ; searchOffset++) { + if(this.get(offset + searchOffset) !== search[searchOffset]){ + return false + } + } + return true +} + + +;(function () { + var methods = { + 'readDoubleBE' : 8 + , 'readDoubleLE' : 8 + , 'readFloatBE' : 4 + , 'readFloatLE' : 4 + , 'readInt32BE' : 4 + , 'readInt32LE' : 4 + , 'readUInt32BE' : 4 + , 'readUInt32LE' : 4 + , 'readInt16BE' : 2 + , 'readInt16LE' : 2 + , 'readUInt16BE' : 2 + , 'readUInt16LE' : 2 + , 'readInt8' : 1 + , 'readUInt8' : 1 + , 'readIntBE' : null + , 'readIntLE' : null + , 'readUIntBE' : null + , 'readUIntLE' : null + } + + for (var m in methods) { + (function (m) { + if (methods[m] === null) { + BufferList.prototype[m] = function (offset, byteLength) { + return this.slice(offset, offset + byteLength)[m](0, byteLength) + } + } + else { + BufferList.prototype[m] = function (offset) { + return this.slice(offset, offset + methods[m])[m](0) + } + } + }(m)) + } +}()) + + +module.exports = BufferList + + +/***/ }), + +/***/ 97713: +/***/ ((module) => { + +"use strict"; + + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.q = codes; + + +/***/ }), + +/***/ 13928: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + + for (var key in obj) { + keys.push(key); + } + + return keys; +}; +/**/ + + +module.exports = Duplex; + +var Readable = __nccwpck_require__(85209); + +var Writable = __nccwpck_require__(58729); + +__nccwpck_require__(44124)(Duplex, Readable); + +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); // the no-half-open enforcer + +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; // no more data can be written. + // But allow more writes to happen in this tick. 
+ + process.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +/***/ }), + +/***/ 4991: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + + +module.exports = PassThrough; + +var Transform = __nccwpck_require__(26753); + +__nccwpck_require__(44124)(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 85209: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + +module.exports = Readable; +/**/ + +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; +/**/ + +var EE = __nccwpck_require__(28614).EventEmitter; + +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ + + +var Stream = __nccwpck_require__(36238); +/**/ + + +var Buffer = __nccwpck_require__(64293).Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +/**/ + + +var debugUtil = __nccwpck_require__(31669); + +var debug; + +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + + +var BufferList = __nccwpck_require__(70662); + +var destroyImpl = __nccwpck_require__(36994); + +var _require = __nccwpck_require__(3533), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = __nccwpck_require__(97713)/* .codes */ .q, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. + + +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; + +__nccwpck_require__(44124)(Readable, Stream); + +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(13928); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + + this.sync = true; // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; // Should close be emitted on destroy. Defaults to true. + + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') + + this.autoDestroy = !!options.autoDestroy; // has it been destroyed + + this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ + this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s + + this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled + + this.readingMore = false; + this.decoder = null; + this.encoding = null; + + if (options.encoding) { + if (!StringDecoder) StringDecoder = __nccwpck_require__(94841)/* .StringDecoder */ .s; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(13928); + if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); // legacy + + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; + +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; // Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+ + +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; // Unshift should *always* be something directly out of read() + + +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + + + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + + return er; +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; // backwards compatibility. 
+ + +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = __nccwpck_require__(94841)/* .StringDecoder */ .s; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 + + this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: + + var p = this._readableState.buffer.head; + var content = ''; + + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + + this._readableState.buffer.clear(); + + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; // Don't raise the hwm > 1GB + + +var MAX_HWM = 0x40000000; + +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + + return n; +} // This function is designed to be inlinable, so please take care when making +// changes to the function body. + + +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } // If we're asking for more than the current hwm, then raise the hwm. + + + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; // Don't have enough + + if (!state.ended) { + state.needReadable = true; + return 0; + } + + return state.length; +} // you can override either this method, or the async _read(n) below. + + +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. + + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. 
Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + // if we need a readable event, then we need to do some reading. + + + var doRead = state.needReadable; + debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some + + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + + + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; // if the length is currently zero, then we *need* a readable event. + + if (state.length === 0) state.needReadable = true; // call internal read method + + this._read(state.highWaterMark); + + state.sync = false; // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. + + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + return ret; +}; + +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + + if (state.decoder) { + var chunk = state.decoder.end(); + + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + + state.ended = true; + + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} // Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. 
+ + +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} + +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + + + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} // at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. + + +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) // didn't get any data, stop spinning. + break; + } + + state.readingMore = false; +} // abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. 
+ + +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + + case 1: + state.pipes = [state.pipes, dest]; + break; + + default: + state.pipes.push(dest); + break; + } + + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + + + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + + function cleanup() { + debug('cleanup'); // cleanup event handlers once the pipe is broken + + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + src.on('data', ondata); + + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + + src.pause(); + } + } // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + + + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } // Make sure our error handler is attached before userland ones. + + + prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. 
+ + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + + dest.once('close', onclose); + + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } // tell the dest that it's being piped to + + + dest.emit('pipe', src); // start the flow if it hasn't been started already. + + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; // if we're not piping anywhere, then do nothing. + + if (state.pipesCount === 0) return this; // just one destination. most common case. + + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; // got a match. + + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } // slow case. multiple pipe destinations. + + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + } + + return this; + } // try to find the right one. + + + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; // set up data events if they are asked for +// Ensure readable listeners eventually get something + + +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused + + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + + return res; +}; + +Readable.prototype.addListener = Readable.prototype.on; + +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. 
This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} // pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. + + +Readable.prototype.resume = function () { + var state = this._readableState; + + if (!state.flowing) { + debug('resume'); // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + + state.flowing = !state.readableListening; + resume(this, state); + } + + state.paused = false; + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + debug('resume', state.reading); + + if (!state.reading) { + stream.read(0); + } + + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + + this._readableState.paused = true; + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + + while (state.flowing && stream.read() !== null) { + ; + } +} // wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. + + +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode + + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + + if (!ret) { + paused = true; + stream.pause(); + } + }); // proxy all the other methods. 
+ // important when wrapping filters and duplexes. + + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } // proxy certain important events. + + + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } // when we try to consume some more bytes, simply unpause the + // underlying stream. + + + this._read = function (n) { + debug('wrapped _read', n); + + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = __nccwpck_require__(37558); + } + + return createReadableStreamAsyncIterator(this); + }; +} + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); // exposed for testing purposes only. + +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); // Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. + +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. 
+ + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} + +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = __nccwpck_require__(57039); + } + + return from(Readable, iterable, opts); + }; +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + + return -1; +} + +/***/ }), + +/***/ 26753: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. 
However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + + +module.exports = Transform; + +var _require$codes = __nccwpck_require__(97713)/* .codes */ .q, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; + +var Duplex = __nccwpck_require__(13928); + +__nccwpck_require__(44124)(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + + ts.writechunk = null; + ts.writecb = null; + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; // start out asking for a readable event once data is transformed. + + this._readableState.needReadable = true; // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } // When the writable side finishes, then flush out anything remaining. + + + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; // This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. 
If you +// never call cb(), then you'll never get another chunk. + + +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; // Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. + + +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} + +/***/ }), + +/***/ 58729: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
+ + +module.exports = Writable; +/* */ + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} // It seems a linked list but it is not +// there will be only 2 of these for each stream + + +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ + + +var Duplex; +/**/ + +Writable.WritableState = WritableState; +/**/ + +var internalUtil = { + deprecate: __nccwpck_require__(65278) +}; +/**/ + +/**/ + +var Stream = __nccwpck_require__(36238); +/**/ + + +var Buffer = __nccwpck_require__(64293).Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +var destroyImpl = __nccwpck_require__(36994); + +var _require = __nccwpck_require__(3533), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = __nccwpck_require__(97713)/* .codes */ .q, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; + +var errorOrDestroy = destroyImpl.errorOrDestroy; + +__nccwpck_require__(44124)(Writable, Stream); + +function nop() {} + +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(13928); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream + // contains buffers or objects. + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called + + this.finalCalled = false; // drain event flag. + + this.needDrain = false; // at the start of calling end() + + this.ending = false; // when end() has been called, and returned + + this.ended = false; // when 'finish' is emitted + + this.finished = false; // has it been destroyed + + this.destroyed = false; // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. 
+ // Everything else in the universe uses 'utf8', though. + + this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + + this.length = 0; // a flag to see when we're in the middle of a write. + + this.writing = false; // when true all writes will be buffered until .uncork() call + + this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + + this.sync = true; // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + + this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) + + this.onwrite = function (er) { + onwrite(stream, er); + }; // the callback that the user supplies to write(chunk,encoding,cb) + + + this.writecb = null; // the amount that is being written when _write is called. + + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + + this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + + this.prefinished = false; // True if the error was already emitted and should not be thrown again + + this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. + + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') + + this.autoDestroy = !!options.autoDestroy; // count buffered requests + + this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + + while (current) { + out.push(current); + current = current.next; + } + + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); // Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. + + +var realHasInstance; + +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(13928); // Writable ctor is applied to Duplexes, too. 
+ // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); // legacy. + + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} // Otherwise people can pipe Writable streams, which is just wrong. + + +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; + +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb + + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} // Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. + + +function validChunk(stream, state, chunk, cb) { + var er; + + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + + return true; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; + +Writable.prototype.cork = function () { + this._writableState.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); // if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. + +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. 
+ + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); // this can emit finish, and it will always happen + // after error + + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); // this can emit finish, but finish must + // always follow error + + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} // Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+ + +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} // if there's something in the buffer waiting, then process it + + +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + + state.pendingcb++; + state.lastBufferedRequest = null; + + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks + + if (state.corked) { + state.corked = 1; + this.uncork(); + } // ignore unnecessary end() calls. 
+ + + if (!state.ending) endWritable(this, state, cb); + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + + if (err) { + errorOrDestroy(stream, err); + } + + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} + +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + + if (need) { + prefinish(stream, state); + + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } // reuse the free corkReq. 
+ + + state.corkedRequestsFree.next = corkReq; +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; + +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; + +/***/ }), + +/***/ 37558: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var _Object$setPrototypeO; + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var finished = __nccwpck_require__(12659); + +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); + +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} + +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + + if (resolve !== null) { + var data = iter[kStream].read(); // we defer if data is null + // we can be expecting either 'end' or + // 'error' + + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} + +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} + +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} + +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + + next: function next() { + var _this = this; + + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + + if (error !== null) { + return Promise.reject(error); + } + + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + + + var lastPromise = this[kLastPromise]; + var promise; + + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + + promise = new Promise(this[kHandlePromise]); + } + + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); + +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise + // returned by next() and store the error + + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + + iterator[kError] = err; + return; + } + + var resolve = iterator[kLastResolve]; + + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; + +module.exports = createReadableStreamAsyncIterator; + +/***/ }), + +/***/ 70662: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var DuplexStream = __nccwpck_require__(51642).Duplex - , util = 
__nccwpck_require__(31669) -function BufferList (callback) { - if (!(this instanceof BufferList)) - return new BufferList(callback) +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - this._bufs = [] - this.length = 0 +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - if (typeof callback == 'function') { - this._callback = callback +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - var piper = function piper (err) { - if (this._callback) { - this._callback(err) - this._callback = null - } - }.bind(this) +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - this.on('pipe', function onPipe (src) { - src.on('error', piper) - }) - this.on('unpipe', function onUnpipe (src) { - src.removeListener('error', piper) - }) - } else { - this.append(callback) - } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - DuplexStream.call(this) +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } + +var _require = __nccwpck_require__(64293), + Buffer = _require.Buffer; + +var _require2 = __nccwpck_require__(31669), + inspect = _require2.inspect; + +var custom = inspect && inspect.custom || 'inspect'; + +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); } +module.exports = +/*#__PURE__*/ +function () { + function BufferList() { + _classCallCheck(this, BufferList); -util.inherits(BufferList, DuplexStream) + this.head = null; + this.tail = null; + this.length = 0; + } + + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { 
+ key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + } -BufferList.prototype._offset = function _offset (offset) { - var tot = 0, i = 0, _t - if (offset === 0) return [ 0, 0 ] - for (; i < this._bufs.length; i++) { - _t = tot + this._bufs[i].length - if (offset < _t || i == this._bufs.length - 1) { - return [ i, offset - tot ] + return ret; } - tot = _t - } -} + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; -BufferList.prototype._reverseOffset = function (blOffset) { - var bufferId = blOffset[0] - var offset = blOffset[1] - for (var i = 0; i < bufferId; i++) { - offset += this._bufs[i].length + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + + return ret; + } // Consumes a specified amount of bytes or characters from the buffered data. + + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } // Consumes a specified amount of characters from the buffered data. + + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Consumes a specified amount of bytes from the buffered data. + + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Make sure the linked list only shows the minimal necessary information. + + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread({}, options, { + // Only inspect one level. + depth: 0, + // It should not recurse. 
+ customInspect: false + })); + } + }]); + + return BufferList; +}(); + +/***/ }), + +/***/ 36994: +/***/ ((module) => { + +"use strict"; + // undocumented cb() API, needed for core, not for public API + +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + + return this; + } // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + + if (this._readableState) { + this._readableState.destroyed = true; + } // if this is a duplex stream mark the writable part as destroyed as well + + + if (this._writableState) { + this._writableState.destroyed = true; } - return offset + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + + return this; } -BufferList.prototype.append = function append (buf) { - var i = 0 +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} - if (Buffer.isBuffer(buf)) { - this._appendBuffer(buf) - } else if (Array.isArray(buf)) { - for (; i < buf.length; i++) - this.append(buf[i]) - } else if (buf instanceof BufferList) { - // unwrap argument into individual BufferLists - for (; i < buf._bufs.length; i++) - this.append(buf._bufs[i]) - } else if (buf != null) { - // coerce number arguments to strings, since Buffer(number) does - // uninitialized memory allocation - if (typeof buf == 'number') - buf = buf.toString() +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} - this._appendBuffer(Buffer.from(buf)) +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; } - return this + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } } +function emitErrorNT(self, err) { + self.emit('error', err); +} -BufferList.prototype._appendBuffer = function appendBuffer (buf) { - this._bufs.push(buf) - this.length += buf.length +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. 
In a future + // semver major update we should change the default to this. + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); } +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; -BufferList.prototype._write = function _write (buf, encoding, callback) { - this._appendBuffer(buf) +/***/ }), - if (typeof callback == 'function') - callback() -} +/***/ 12659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). -BufferList.prototype._read = function _read (size) { - if (!this.length) - return this.push(null) - size = Math.min(size, this.length) - this.push(this.slice(0, size)) - this.consume(size) -} +var ERR_STREAM_PREMATURE_CLOSE = __nccwpck_require__(97713)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; -BufferList.prototype.end = function end (chunk) { - DuplexStream.prototype.end.call(this, chunk) + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } - if (this._callback) { - this._callback(null, this.slice()) - this._callback = null - } + callback.apply(this, args); + }; } +function noop() {} -BufferList.prototype.get = function get (index) { - if (index > this.length || index < 0) { - return undefined - } - var offset = this._offset(index) - return this._bufs[offset[0]][offset[1]] +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; } +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; -BufferList.prototype.slice = function slice (start, end) { - if (typeof start == 'number' && start < 0) - start += this.length - if (typeof end == 'number' && end < 0) - end += this.length - return this.copy(null, 0, start, end) -} + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; -BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { - if (typeof srcStart != 'number' || srcStart < 0) - srcStart = 0 - if (typeof srcEnd != 'number' || srcEnd > this.length) - srcEnd = this.length - if (srcStart >= this.length) - return dst || Buffer.alloc(0) - if (srcEnd <= 0) - return dst || Buffer.alloc(0) + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; - var copy = !!dst - , off = this._offset(srcStart) - , len = srcEnd - srcStart - , bytes = len - , bufoff = (copy && dstStart) || 0 - , start = off[1] - , l - , i + var readableEnded = stream._readableState && stream._readableState.endEmitted; - // copy/slice everything - if (srcStart === 0 && srcEnd == this.length) { - if (!copy) { // slice, but full concat if multiple buffers - return this._bufs.length === 1 - ? 
this._bufs[0] - : Buffer.concat(this._bufs, this.length) + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + + var onerror = function onerror(err) { + callback.call(stream, err); + }; + + var onclose = function onclose() { + var err; + + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); } - // copy, need to copy individual buffers - for (i = 0; i < this._bufs.length; i++) { - this._bufs[i].copy(dst, bufoff) - bufoff += this._bufs[i].length + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); } + }; - return dst - } + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; - // easy, cheap case where it's a subset of one of the buffers - if (bytes <= this._bufs[off[0]].length - start) { - return copy - ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) - : this._bufs[off[0]].slice(start, start + bytes) + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); } - if (!copy) // a slice, we need something to copy in to - dst = Buffer.allocUnsafe(len) + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} - for (i = off[0]; i < this._bufs.length; i++) { - l = this._bufs[i].length - start +module.exports = eos; - if (bytes > l) { - this._bufs[i].copy(dst, bufoff, start) - bufoff += l - } else { - this._bufs[i].copy(dst, bufoff, start, start + bytes) - bufoff += l - break - } +/***/ }), - bytes -= l +/***/ 57039: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (start) - start = 0 - } +"use strict"; - // safeguard so that we don't return uninitialized memory - if (dst.length > bufoff) return dst.slice(0, bufoff) - return dst -} +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } -BufferList.prototype.shallowSlice = function shallowSlice (start, end) { - start = start || 0 - end = typeof end !== 'number' ? 
this.length : end +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - if (start < 0) - start += this.length - if (end < 0) - end += this.length +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - if (start === end) { - return new BufferList() - } - var startOffset = this._offset(start) - , endOffset = this._offset(end) - , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1) +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - if (endOffset[1] == 0) - buffers.pop() - else - buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1]) +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - if (startOffset[1] != 0) - buffers[0] = buffers[0].slice(startOffset[1]) +var ERR_INVALID_ARG_TYPE = __nccwpck_require__(97713)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE; - return new BufferList(buffers) -} +function from(Readable, iterable, opts) { + var iterator; -BufferList.prototype.toString = function toString (encoding, start, end) { - return this.slice(start, end).toString(encoding) -} + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); -BufferList.prototype.consume = function consume (bytes) { - // first, normalize the argument, in accordance with how Buffer does it - bytes = Math.trunc(bytes) - // do nothing if not a positive number - if (Number.isNaN(bytes) || bytes <= 0) return this + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); // Reading boolean to protect against _read + // being called before last iteration completion. 
- while (this._bufs.length) { - if (bytes >= this._bufs[0].length) { - bytes -= this._bufs[0].length - this.length -= this._bufs[0].length - this._bufs.shift() - } else { - this._bufs[0] = this._bufs[0].slice(bytes) - this.length -= bytes - break + var reading = false; + + readable._read = function () { + if (!reading) { + reading = true; + next(); } + }; + + function next() { + return _next2.apply(this, arguments); } - return this + + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _ref = yield iterator.next(), + value = _ref.value, + done = _ref.done; + + if (done) { + readable.push(null); + } else if (readable.push((yield value))) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + + return readable; } +module.exports = from; -BufferList.prototype.duplicate = function duplicate () { - var i = 0 - , copy = new BufferList() +/***/ }), - for (; i < this._bufs.length; i++) - copy.append(this._bufs[i]) +/***/ 20740: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return copy +"use strict"; +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). + + +var eos; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; } +var _require$codes = __nccwpck_require__(97713)/* .codes */ .q, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; -BufferList.prototype._destroy = function _destroy (err, cb) { - this._bufs.length = 0 - this.length = 0 - cb(err) +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; } +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} -BufferList.prototype.indexOf = function (search, offset, encoding) { - if (encoding === undefined && typeof offset === 'string') { - encoding = offset - offset = undefined - } - if (typeof search === 'function' || Array.isArray(search)) { - throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.') - } else if (typeof search === 'number') { - search = Buffer.from([search]) - } else if (typeof search === 'string') { - search = Buffer.from(search, encoding) - } else if (search instanceof BufferList) { - search = search.slice() - } else if (!Buffer.isBuffer(search)) { - search = Buffer.from(search) - } +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = __nccwpck_require__(12659); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; // request.destroy just do .end - .abort is what we want - offset = Number(offset || 0) - if (isNaN(offset)) { - offset = 0 - } + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} - if (offset < 0) { - offset = this.length + offset - } +function call(fn) { + fn(); +} - if (offset < 0) { - offset = 0 - } +function pipe(from, to) { + return 
from.pipe(to); +} - if (search.length === 0) { - return offset > this.length ? this.length : offset +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} + +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; } - var blOffset = this._offset(offset) - var blIndex = blOffset[0] // index of which internal buffer we're working on - var buffOffset = blOffset[1] // offset of the internal buffer we're working on + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; - // scan over each buffer - for (blIndex; blIndex < this._bufs.length; blIndex++) { - var buff = this._bufs[blIndex] - while(buffOffset < buff.length) { - var availableWindow = buff.length - buffOffset - if (availableWindow >= search.length) { - var nativeSearchResult = buff.indexOf(search, buffOffset) - if (nativeSearchResult !== -1) { - return this._reverseOffset([blIndex, nativeSearchResult]) - } - buffOffset = buff.length - search.length + 1 // end of native search window - } else { - var revOffset = this._reverseOffset([blIndex, buffOffset]) - if (this._match(revOffset, search)) { - return revOffset - } - buffOffset++ - } - } - buffOffset = 0 + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); } - return -1 + + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); } -BufferList.prototype._match = function(offset, search) { - if (this.length - offset < search.length) { - return false - } - for (var searchOffset = 0; searchOffset < search.length ; searchOffset++) { - if(this.get(offset + searchOffset) !== search[searchOffset]){ - return false +module.exports = pipeline; + +/***/ }), + +/***/ 3533: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var ERR_INVALID_OPT_VALUE = __nccwpck_require__(97713)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE; + +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} + +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); } - } - return true + + return Math.floor(hwm); + } // Default value + + + return state.objectMode ? 
16 : 16 * 1024; } +module.exports = { + getHighWaterMark: getHighWaterMark +}; -;(function () { - var methods = { - 'readDoubleBE' : 8 - , 'readDoubleLE' : 8 - , 'readFloatBE' : 4 - , 'readFloatLE' : 4 - , 'readInt32BE' : 4 - , 'readInt32LE' : 4 - , 'readUInt32BE' : 4 - , 'readUInt32LE' : 4 - , 'readInt16BE' : 2 - , 'readInt16LE' : 2 - , 'readUInt16BE' : 2 - , 'readUInt16LE' : 2 - , 'readInt8' : 1 - , 'readUInt8' : 1 - , 'readIntBE' : null - , 'readIntLE' : null - , 'readUIntBE' : null - , 'readUIntLE' : null - } +/***/ }), - for (var m in methods) { - (function (m) { - if (methods[m] === null) { - BufferList.prototype[m] = function (offset, byteLength) { - return this.slice(offset, offset + byteLength)[m](0, byteLength) - } - } - else { - BufferList.prototype[m] = function (offset) { - return this.slice(offset, offset + methods[m])[m](0) - } - } - }(m)) - } -}()) +/***/ 36238: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +module.exports = __nccwpck_require__(92413); -module.exports = BufferList + +/***/ }), + +/***/ 85519: +/***/ ((module, exports, __nccwpck_require__) => { + +var Stream = __nccwpck_require__(92413); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(85209); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(58729); + exports.Duplex = __nccwpck_require__(13928); + exports.Transform = __nccwpck_require__(26753); + exports.PassThrough = __nccwpck_require__(4991); + exports.finished = __nccwpck_require__(12659); + exports.pipeline = __nccwpck_require__(20740); +} /***/ }), @@ -37393,7 +39340,7 @@ function highlight(code, options) { if (options === void 0) { options = {}; } var html; if (options.language) { - html = hljs.highlight(code, { language: options.language, ignoreIllegals: options.ignoreIllegals }).value; + html = hljs.highlight(options.language, code, options.ignoreIllegals, options.continuation).value; } else { html = hljs.highlightAuto(code, options.languageSubset).value; @@ -47300,40 +49247,6 @@ var types_1 = __nccwpck_require__(30920); Object.defineProperty(exports, "HostType", ({ enumerable: true, get: function () { return types_1.HostType; } })); -/***/ }), - -/***/ 2602: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.setupCustomInspect = void 0; -var inspect = __nccwpck_require__(31669).inspect; -var EOL = __nccwpck_require__(12087).EOL; -var setupCustomInspect = function (obj) { - // istanbul ignore else - if (true) { - var inspecting_1 = false; - Object.defineProperty(obj.prototype, inspect.custom, { - value: function () { - if (inspecting_1) { - return this; - } - inspecting_1 = true; - var options = { colors: process.stdout.isTTY }; - var src = inspect(this, options); - var _a = this, host = _a.host, hostname = _a.hostname, port = _a.port, type = _a.type; - var vp = inspect({ host: host, hostname: hostname, port: port, type: type }, options); - inspecting_1 = false; - return "" + src + EOL + "Virtual Properties: " + vp; - } - }); - } -}; -exports.setupCustomInspect = setupCustomInspect; - - /***/ }), /***/ 22382: @@ -47341,16 +49254,19 @@ exports.setupCustomInspect = setupCustomInspect; "use strict"; -var __spreadArray = (this && this.__spreadArray) || function (to, from) { - for (var 
i = 0, il = from.length, j = to.length; i < il; i++, j++) - to[j] = from[i]; - return to; +var __spreadArrays = (this && this.__spreadArrays) || function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ConnectionString = void 0; +var util_1 = __nccwpck_require__(31669); +var os_1 = __nccwpck_require__(12087); var types_1 = __nccwpck_require__(30920); var static_1 = __nccwpck_require__(50022); -var inspect_1 = __nccwpck_require__(2602); var errInvalidDefaults = "Invalid \"defaults\" parameter: "; var ConnectionString = /** @class */ (function () { /** @@ -47439,7 +49355,7 @@ var ConnectionString = /** @class */ (function () { (_a = params_1[prop]).push.apply(_a, val); } else { - params_1[prop] = __spreadArray([params_1[prop]], val); + params_1[prop] = __spreadArrays([params_1[prop]], val); } } else { @@ -47656,7 +49572,24 @@ exports.ConnectionString = ConnectionString; desc.enumerable = false; Object.defineProperty(ConnectionString.prototype, prop, desc); }); - inspect_1.setupCustomInspect(ConnectionString); + var inspecting = false; + // istanbul ignore else + if (util_1.inspect.custom) { + Object.defineProperty(ConnectionString.prototype, util_1.inspect.custom, { + value: function () { + if (inspecting) { + return this; + } + inspecting = true; + var options = { colors: process.stdout.isTTY }; + var src = util_1.inspect(this, options); + var _a = this, host = _a.host, hostname = _a.hostname, port = _a.port, type = _a.type; + var vp = util_1.inspect({ host: host, hostname: hostname, port: port, type: type }, options); + inspecting = false; + return "" + src + os_1.EOL + "Virtual Properties: " + vp; + } + }); + } })(); @@ -53872,9 +55805,8 @@ var assert = __nccwpck_require__(42357); var debug = __nccwpck_require__(31133); // Create handlers that pass events from native requests -var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; var eventHandlers = Object.create(null); -events.forEach(function (event) { +["abort", "aborted", "connect", "error", "socket", "timeout"].forEach(function (event) { eventHandlers[event] = function (arg1, arg2, arg3) { this._redirectable.emit(event, arg1, arg2, arg3); }; @@ -53928,8 +55860,14 @@ function RedirectableRequest(options, responseCallback) { RedirectableRequest.prototype = Object.create(Writable.prototype); RedirectableRequest.prototype.abort = function () { - abortRequest(this._currentRequest); + // Abort the internal request + this._currentRequest.removeAllListeners(); + this._currentRequest.on("error", noop); + this._currentRequest.abort(); + + // Abort this request this.emit("abort"); + this.removeAllListeners(); }; // Writes buffered data to the current native request @@ -54016,14 +55954,8 @@ RedirectableRequest.prototype.setTimeout = function (msecs, callback) { this.on("timeout", callback); } - function destroyOnTimeout(socket) { - socket.setTimeout(msecs); - socket.removeListener("timeout", socket.destroy); - socket.addListener("timeout", socket.destroy); - } - // Sets up a timer to trigger a timeout event - function startTimer(socket) { + function startTimer() { if (self._timeout) { clearTimeout(self._timeout); } @@ -54031,7 +55963,6 @@ RedirectableRequest.prototype.setTimeout = function (msecs, callback) { self.emit("timeout"); clearTimer(); }, msecs); - 
destroyOnTimeout(socket); } // Prevent a timeout from triggering @@ -54047,13 +55978,12 @@ RedirectableRequest.prototype.setTimeout = function (msecs, callback) { // Start the timer when the socket is opened if (this.socket) { - startTimer(this.socket); + startTimer(); } else { this._currentRequest.once("socket", startTimer); } - this.on("socket", destroyOnTimeout); this.once("response", clearTimer); this.once("error", clearTimer); @@ -54132,8 +56062,11 @@ RedirectableRequest.prototype._performRequest = function () { // Set up event handlers request._redirectable = this; - for (var e = 0; e < events.length; e++) { - request.on(events[e], eventHandlers[events[e]]); + for (var event in eventHandlers) { + /* istanbul ignore else */ + if (event) { + request.on(event, eventHandlers[event]); + } } // End a redirected request @@ -54191,7 +56124,9 @@ RedirectableRequest.prototype._processResponse = function (response) { if (location && this._options.followRedirects !== false && statusCode >= 300 && statusCode < 400) { // Abort the current request - abortRequest(this._currentRequest); + this._currentRequest.removeAllListeners(); + this._currentRequest.on("error", noop); + this._currentRequest.abort(); // Discard the remainder of the response to avoid waiting for data response.destroy(); @@ -54383,14 +56318,6 @@ function createErrorType(code, defaultMessage) { return CustomError; } -function abortRequest(request) { - for (var e = 0; e < events.length; e++) { - request.removeListener(events[e], eventHandlers[events[e]]); - } - request.on("error", noop); - request.abort(); -} - // Exports module.exports = wrap({ http: http, https: https }); module.exports.wrap = wrap; @@ -55722,100 +57649,13 @@ Gauge.prototype._doRedraw = function () { /***/ }), -/***/ 53737: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -var numberIsNan = __nccwpck_require__(16325); - -module.exports = function (x) { - if (numberIsNan(x)) { - return false; - } - - // https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1369 - - // code points are derived from: - // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt - if (x >= 0x1100 && ( - x <= 0x115f || // Hangul Jamo - 0x2329 === x || // LEFT-POINTING ANGLE BRACKET - 0x232a === x || // RIGHT-POINTING ANGLE BRACKET - // CJK Radicals Supplement .. Enclosed CJK Letters and Months - (0x2e80 <= x && x <= 0x3247 && x !== 0x303f) || - // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A - 0x3250 <= x && x <= 0x4dbf || - // CJK Unified Ideographs .. Yi Radicals - 0x4e00 <= x && x <= 0xa4c6 || - // Hangul Jamo Extended-A - 0xa960 <= x && x <= 0xa97c || - // Hangul Syllables - 0xac00 <= x && x <= 0xd7a3 || - // CJK Compatibility Ideographs - 0xf900 <= x && x <= 0xfaff || - // Vertical Forms - 0xfe10 <= x && x <= 0xfe19 || - // CJK Compatibility Forms .. Small Form Variants - 0xfe30 <= x && x <= 0xfe6b || - // Halfwidth and Fullwidth Forms - 0xff01 <= x && x <= 0xff60 || - 0xffe0 <= x && x <= 0xffe6 || - // Kana Supplement - 0x1b000 <= x && x <= 0x1b001 || - // Enclosed Ideographic Supplement - 0x1f200 <= x && x <= 0x1f251 || - // CJK Unified Ideographs Extension B .. 
Tertiary Ideographic Plane - 0x20000 <= x && x <= 0x3fffd)) { - return true; - } - - return false; -} - - -/***/ }), - -/***/ 56529: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 85899: +/***/ ((module) => { "use strict"; -var stripAnsi = __nccwpck_require__(41360); -var codePointAt = __nccwpck_require__(68929); -var isFullwidthCodePoint = __nccwpck_require__(53737); - -// https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1345 -module.exports = function (str) { - if (typeof str !== 'string' || str.length === 0) { - return 0; - } - - var width = 0; - - str = stripAnsi(str); - - for (var i = 0; i < str.length; i++) { - var code = codePointAt(str, i); - - // ignore control characters - if (code <= 0x1f || (code >= 0x7f && code <= 0x9f)) { - continue; - } - - // surrogates - if (code >= 0x10000) { - i++; - } - - if (isFullwidthCodePoint(code)) { - width += 2; - } else { - width++; - } - } - - return width; +module.exports = function () { + return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g; }; @@ -55826,7 +57666,7 @@ module.exports = function (str) { "use strict"; -var ansiRegex = __nccwpck_require__(65063)(); +var ansiRegex = __nccwpck_require__(85899)(); module.exports = function (str) { return typeof str === 'string' ? str.replace(ansiRegex, '') : str; @@ -55910,7 +57750,7 @@ module.exports = process var validate = __nccwpck_require__(83278) var renderTemplate = __nccwpck_require__(13444) var wideTruncate = __nccwpck_require__(8413) -var stringWidth = __nccwpck_require__(56529) +var stringWidth = __nccwpck_require__(42577) module.exports = function (theme, width, completed) { validate('ONN', [theme, width, completed]) @@ -56178,7 +58018,7 @@ module.exports = function spin (spinstr, spun) { "use strict"; -var stringWidth = __nccwpck_require__(56529) +var stringWidth = __nccwpck_require__(42577) module.exports = TemplateItem @@ -56444,7 +58284,7 @@ themes.setDefault({platform: 'darwin', hasUnicode: true, hasColor: true}, 'color "use strict"; -var stringWidth = __nccwpck_require__(56529) +var stringWidth = __nccwpck_require__(42577) var stripAnsi = __nccwpck_require__(41360) module.exports = wideTruncate @@ -56475,6 +58315,8 @@ function wideTruncate (str, target) { /***/ 47625: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +exports.alphasort = alphasort +exports.alphasorti = alphasorti exports.setopts = setopts exports.ownProp = ownProp exports.makeAbs = makeAbs @@ -56492,8 +58334,12 @@ var minimatch = __nccwpck_require__(83973) var isAbsolute = __nccwpck_require__(38714) var Minimatch = minimatch.Minimatch +function alphasorti (a, b) { + return a.toLowerCase().localeCompare(b.toLowerCase()) +} + function alphasort (a, b) { - return a.localeCompare(b, 'en') + return a.localeCompare(b) } function setupIgnores (self, options) { @@ -56621,7 +58467,7 @@ function finish (self) { all = Object.keys(all) if (!self.nosort) - all = all.sort(alphasort) + all = all.sort(self.nocase ? 
alphasorti : alphasort) // at *some* point we statted all of these if (self.mark) { @@ -56769,6 +58615,8 @@ var assert = __nccwpck_require__(42357) var isAbsolute = __nccwpck_require__(38714) var globSync = __nccwpck_require__(29010) var common = __nccwpck_require__(47625) +var alphasort = common.alphasort +var alphasorti = common.alphasorti var setopts = common.setopts var ownProp = common.ownProp var inflight = __nccwpck_require__(52492) @@ -57524,6 +59372,8 @@ var path = __nccwpck_require__(85622) var assert = __nccwpck_require__(42357) var isAbsolute = __nccwpck_require__(38714) var common = __nccwpck_require__(47625) +var alphasort = common.alphasort +var alphasorti = common.alphasorti var setopts = common.setopts var ownProp = common.ownProp var childrenIgnored = common.childrenIgnored @@ -58235,7 +60085,6 @@ var deepFreezeEs6 = deepFreeze; var _default = deepFreeze; deepFreezeEs6.default = _default; -/** @implements CallbackResponse */ class Response { /** * @param {CompiledMode} mode @@ -58245,11 +60094,10 @@ class Response { if (mode.data === undefined) mode.data = {}; this.data = mode.data; - this.isMatchIgnored = false; } ignoreMatch() { - this.isMatchIgnored = true; + this.ignore = true; } } @@ -58596,15 +60444,6 @@ function startsWith(re, lexeme) { return match && match.index === 0; } -// BACKREF_RE matches an open parenthesis or backreference. To avoid -// an incorrect parse, it additionally matches the following: -// - [...] elements, where the meaning of parentheses and escapes change -// - other escape sequences, so we do not misparse escape sequences as -// interesting elements -// - non-matching or lookahead parentheses, which do not capture. These -// follow the '(' with a '?'. -const BACKREF_RE = /\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./; - // join logically computes regexps.join(separator), but fixes the // backreferences so they continue to match. // it also places each individual regular expression into it's own @@ -58616,34 +60455,45 @@ const BACKREF_RE = /\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./; * @returns {string} */ function join(regexps, separator = "|") { + // backreferenceRe matches an open parenthesis or backreference. To avoid + // an incorrect parse, it additionally matches the following: + // - [...] elements, where the meaning of parentheses and escapes change + // - other escape sequences, so we do not misparse escape sequences as + // interesting elements + // - non-matching or lookahead parentheses, which do not capture. These + // follow the '(' with a '?'. + const backreferenceRe = /\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./; let numCaptures = 0; - - return regexps.map((regex) => { + let ret = ''; + for (let i = 0; i < regexps.length; i++) { numCaptures += 1; const offset = numCaptures; - let re = source(regex); - let out = ''; - + let re = source(regexps[i]); + if (i > 0) { + ret += separator; + } + ret += "("; while (re.length > 0) { - const match = BACKREF_RE.exec(re); - if (!match) { - out += re; + const match = backreferenceRe.exec(re); + if (match == null) { + ret += re; break; } - out += re.substring(0, match.index); + ret += re.substring(0, match.index); re = re.substring(match.index + match[0].length); if (match[0][0] === '\\' && match[1]) { // Adjust the backreference. 
- out += '\\' + String(Number(match[1]) + offset); + ret += '\\' + String(Number(match[1]) + offset); } else { - out += match[0]; + ret += match[0]; if (match[0] === '(') { numCaptures++; } } } - return out; - }).map(re => `(${re})`).join(separator); + ret += ")"; + } + return ret; } // Common regexps @@ -59294,7 +61144,7 @@ function compileLanguage(language, { plugins }) { */ function compileMode(mode, parent) { const cmode = /** @type CompiledMode */ (mode); - if (mode.isCompiled) return cmode; + if (mode.compiled) return cmode; [ // do this early so compiler extensions generally don't have to worry about @@ -59316,7 +61166,7 @@ function compileLanguage(language, { plugins }) { compileRelevance ].forEach(ext => ext(mode, parent)); - mode.isCompiled = true; + mode.compiled = true; let keywordPattern = null; if (typeof mode.keywords === "object") { @@ -59435,7 +61285,7 @@ function expandOrCloneMode(mode) { return mode; } -var version = "10.7.2"; +var version = "10.6.0"; // @ts-nocheck @@ -59509,8 +61359,8 @@ function BuildVuePlugin(hljs) { /** @type {HLJSPlugin} */ const mergeHTMLPlugin = { - "after:highlightElement": ({ el, result, text }) => { - const originalStream = nodeStream(el); + "after:highlightBlock": ({ block, result, text }) => { + const originalStream = nodeStream(block); if (!originalStream.length) return; const resultNode = document.createElement('div'); @@ -59667,11 +61517,6 @@ https://github.com/highlightjs/highlight.js/issues/2880#issuecomment-747275419 */ -/** - * @type {Record} - */ -const seenDeprecations = {}; - /** * @param {string} message */ @@ -59692,10 +61537,7 @@ const warn = (message, ...args) => { * @param {string} message */ const deprecated = (version, message) => { - if (seenDeprecations[`${version}/${message}`]) return; - console.log(`Deprecated as of ${version}. ${message}`); - seenDeprecations[`${version}/${message}`] = true; }; /* @@ -59780,14 +61622,8 @@ const HLJS = function(hljs) { /** * Core highlighting function. 
* - * OLD API - * highlight(lang, code, ignoreIllegals, continuation) - * - * NEW API - * highlight(code, {lang, ignoreIllegals}) - * - * @param {string} codeOrlanguageName - the language to use for highlighting - * @param {string | HighlightOptions} optionsOrCode - the code to highlight + * @param {string} languageName - the language to use for highlighting + * @param {string} code - the code to highlight * @param {boolean} [ignoreIllegals] - whether to ignore illegal matches, default is to bail * @param {CompiledMode} [continuation] - current continuation mode, if any * @@ -59799,24 +61635,7 @@ const HLJS = function(hljs) { * @property {CompiledMode} top - top of the current mode stack * @property {boolean} illegal - indicates whether any illegal matches were found */ - function highlight(codeOrlanguageName, optionsOrCode, ignoreIllegals, continuation) { - let code = ""; - let languageName = ""; - if (typeof optionsOrCode === "object") { - code = codeOrlanguageName; - ignoreIllegals = optionsOrCode.ignoreIllegals; - languageName = optionsOrCode.language; - // continuation not supported at all via the new API - // eslint-disable-next-line no-undefined - continuation = undefined; - } else { - // old API - deprecated("10.7.0", "highlight(lang, code, ...args) has been deprecated."); - deprecated("10.7.0", "Please use highlight(code, options) instead.\nhttps://github.com/highlightjs/highlight.js/issues/2277"); - languageName = codeOrlanguageName; - code = optionsOrCode; - } - + function highlight(languageName, code, ignoreIllegals, continuation) { /** @type {BeforeHighlightContext} */ const context = { code, @@ -59843,12 +61662,14 @@ const HLJS = function(hljs) { * private highlight that's used internally and does not fire callbacks * * @param {string} languageName - the language to use for highlighting - * @param {string} codeToHighlight - the code to highlight - * @param {boolean?} [ignoreIllegals] - whether to ignore illegal matches, default is to bail - * @param {CompiledMode?} [continuation] - current continuation mode, if any + * @param {string} code - the code to highlight + * @param {boolean} [ignoreIllegals] - whether to ignore illegal matches, default is to bail + * @param {CompiledMode} [continuation] - current continuation mode, if any * @returns {HighlightResult} - result of the highlight operation */ - function _highlight(languageName, codeToHighlight, ignoreIllegals, continuation) { + function _highlight(languageName, code, ignoreIllegals, continuation) { + const codeToHighlight = code; + /** * Return keyword data if a match is a keyword * @param {CompiledMode} mode - current mode @@ -59880,14 +61701,8 @@ const HLJS = function(hljs) { buf = ""; relevance += keywordRelevance; - if (kind.startsWith("_")) { - // _ implied for relevance only, do not highlight - // by applying a class name - buf += match[0]; - } else { - const cssClass = language.classNameAliases[kind] || kind; - emitter.addKeyword(match[0], cssClass); - } + const cssClass = language.classNameAliases[kind] || kind; + emitter.addKeyword(match[0], cssClass); } else { buf += match[0]; } @@ -59957,7 +61772,7 @@ const HLJS = function(hljs) { if (mode["on:end"]) { const resp = new Response(mode); mode["on:end"](match, resp); - if (resp.isMatchIgnored) matched = false; + if (resp.ignore) matched = false; } if (matched) { @@ -60009,7 +61824,7 @@ const HLJS = function(hljs) { for (const cb of beforeCallbacks) { if (!cb) continue; cb(match, resp); - if (resp.isMatchIgnored) return doIgnore(lexeme); + if (resp.ignore) return 
doIgnore(lexeme); } if (newMode && newMode.endSameAsBegin) { @@ -60373,12 +62188,12 @@ const HLJS = function(hljs) { /** @type {HLJSPlugin} */ const brPlugin = { - "before:highlightElement": ({ el }) => { + "before:highlightBlock": ({ block }) => { if (options.useBR) { - el.innerHTML = el.innerHTML.replace(/\n/g, '').replace(//g, '\n'); + block.innerHTML = block.innerHTML.replace(/\n/g, '').replace(//g, '\n'); } }, - "after:highlightElement": ({ result }) => { + "after:highlightBlock": ({ result }) => { if (options.useBR) { result.value = result.value.replace(/\n/g, "
"); } @@ -60388,7 +62203,7 @@ const HLJS = function(hljs) { const TAB_REPLACE_RE = /^(<[^>]+>|\t)+/gm; /** @type {HLJSPlugin} */ const tabReplacePlugin = { - "after:highlightElement": ({ result }) => { + "after:highlightBlock": ({ result }) => { if (options.tabReplace) { result.value = result.value.replace(TAB_REPLACE_RE, (m) => m.replace(/\t/g, options.tabReplace) @@ -60403,23 +62218,21 @@ const HLJS = function(hljs) { * * @param {HighlightedHTMLElement} element - the HTML element to highlight */ - function highlightElement(element) { + function highlightBlock(element) { /** @type HTMLElement */ let node = null; const language = blockLanguage(element); if (shouldNotHighlight(language)) return; - // support for v10 API - fire("before:highlightElement", - { el: element, language: language }); + fire("before:highlightBlock", + { block: element, language: language }); node = element; const text = node.textContent; - const result = language ? highlight(text, { language, ignoreIllegals: true }) : highlightAuto(text); + const result = language ? highlight(language, text, true) : highlightAuto(text); - // support for v10 API - fire("after:highlightElement", { el: element, result, text }); + fire("after:highlightBlock", { block: element, result, text }); element.innerHTML = result.value; updateClassName(element, language, result.language); @@ -60465,7 +62278,7 @@ const HLJS = function(hljs) { deprecated("10.6.0", "initHighlighting() is deprecated. Use highlightAll() instead."); const blocks = document.querySelectorAll('pre code'); - blocks.forEach(highlightElement); + blocks.forEach(highlightBlock); }; // Higlights all when DOMContentLoaded fires @@ -60476,22 +62289,21 @@ const HLJS = function(hljs) { } let wantsHighlight = false; + let domLoaded = false; /** * auto-highlights all pre>code elements on the page */ function highlightAll() { // if we are called too early in the loading process - if (document.readyState === "loading") { - wantsHighlight = true; - return; - } + if (!domLoaded) { wantsHighlight = true; return; } const blocks = document.querySelectorAll('pre code'); - blocks.forEach(highlightElement); + blocks.forEach(highlightBlock); } function boot() { + domLoaded = true; // if a highlight was requested before DOM was loaded, do now if (wantsHighlight) highlightAll(); } @@ -60531,20 +62343,6 @@ const HLJS = function(hljs) { } } - /** - * Remove a language grammar module - * - * @param {string} languageName - */ - function unregisterLanguage(languageName) { - delete languages[languageName]; - for (const alias of Object.keys(aliases)) { - if (aliases[alias] === languageName) { - delete aliases[alias]; - } - } - } - /** * @returns {string[]} List of language internal names */ @@ -60590,7 +62388,7 @@ const HLJS = function(hljs) { if (typeof aliasList === 'string') { aliasList = [aliasList]; } - aliasList.forEach(alias => { aliases[alias.toLowerCase()] = languageName; }); + aliasList.forEach(alias => { aliases[alias] = languageName; }); } /** @@ -60602,34 +62400,10 @@ const HLJS = function(hljs) { return lang && !lang.disableAutodetect; } - /** - * Upgrades the old highlightBlock plugins to the new - * highlightElement API - * @param {HLJSPlugin} plugin - */ - function upgradePluginAPI(plugin) { - // TODO: remove with v12 - if (plugin["before:highlightBlock"] && !plugin["before:highlightElement"]) { - plugin["before:highlightElement"] = (data) => { - plugin["before:highlightBlock"]( - Object.assign({ block: data.el }, data) - ); - }; - } - if (plugin["after:highlightBlock"] && 
!plugin["after:highlightElement"]) { - plugin["after:highlightElement"] = (data) => { - plugin["after:highlightBlock"]( - Object.assign({ block: data.el }, data) - ); - }; - } - } - /** * @param {HLJSPlugin} plugin */ function addPlugin(plugin) { - upgradePluginAPI(plugin); plugins.push(plugin); } @@ -60660,31 +62434,17 @@ const HLJS = function(hljs) { return fixMarkup(arg); } - /** - * - * @param {HighlightedHTMLElement} el - */ - function deprecateHighlightBlock(el) { - deprecated("10.7.0", "highlightBlock will be removed entirely in v12.0"); - deprecated("10.7.0", "Please use highlightElement now."); - - return highlightElement(el); - } - /* Interface definition */ Object.assign(hljs, { highlight, highlightAuto, highlightAll, fixMarkup: deprecateFixMarkup, - highlightElement, - // TODO: Remove with v12 API - highlightBlock: deprecateHighlightBlock, + highlightBlock, configure, initHighlighting, initHighlightingOnLoad, registerLanguage, - unregisterLanguage, listLanguages, getLanguage, registerAliases, @@ -62523,6 +64283,7 @@ function arcade(hljs) { return { name: 'ArcGIS Arcade', + aliases: ['arcade'], keywords: KEYWORDS, contains: [ hljs.APOS_STRING_MODE, @@ -62633,14 +64394,6 @@ function source(re) { return re.source; } -/** - * @param {RegExp | string } re - * @returns {string} - */ -function lookahead(re) { - return concat('(?=', re, ')'); -} - /** * @param {RegExp | string } re * @returns {string} @@ -62748,7 +64501,9 @@ function cPlusPlus(hljs) { }), { className: 'meta-string', - begin: /<.*?>/ + begin: /<.*?>/, + end: /$/, + illegal: '\\n' }, C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE @@ -62763,122 +64518,6 @@ function cPlusPlus(hljs) { const FUNCTION_TITLE = optional(NAMESPACE_RE) + hljs.IDENT_RE + '\\s*\\('; - const COMMON_CPP_HINTS = [ - 'asin', - 'atan2', - 'atan', - 'calloc', - 'ceil', - 'cosh', - 'cos', - 'exit', - 'exp', - 'fabs', - 'floor', - 'fmod', - 'fprintf', - 'fputs', - 'free', - 'frexp', - 'auto_ptr', - 'deque', - 'list', - 'queue', - 'stack', - 'vector', - 'map', - 'set', - 'pair', - 'bitset', - 'multiset', - 'multimap', - 'unordered_set', - 'fscanf', - 'future', - 'isalnum', - 'isalpha', - 'iscntrl', - 'isdigit', - 'isgraph', - 'islower', - 'isprint', - 'ispunct', - 'isspace', - 'isupper', - 'isxdigit', - 'tolower', - 'toupper', - 'labs', - 'ldexp', - 'log10', - 'log', - 'malloc', - 'realloc', - 'memchr', - 'memcmp', - 'memcpy', - 'memset', - 'modf', - 'pow', - 'printf', - 'putchar', - 'puts', - 'scanf', - 'sinh', - 'sin', - 'snprintf', - 'sprintf', - 'sqrt', - 'sscanf', - 'strcat', - 'strchr', - 'strcmp', - 'strcpy', - 'strcspn', - 'strlen', - 'strncat', - 'strncmp', - 'strncpy', - 'strpbrk', - 'strrchr', - 'strspn', - 'strstr', - 'tanh', - 'tan', - 'unordered_map', - 'unordered_multiset', - 'unordered_multimap', - 'priority_queue', - 'make_pair', - 'array', - 'shared_ptr', - 'abort', - 'terminate', - 'abs', - 'acos', - 'vfprintf', - 'vprintf', - 'vsprintf', - 'endl', - 'initializer_list', - 'unique_ptr', - 'complex', - 'imaginary', - 'std', - 'string', - 'wstring', - 'cin', - 'cout', - 'cerr', - 'clog', - 'stdin', - 'stdout', - 'stderr', - 'stringstream', - 'istringstream', - 'ostringstream' - ]; - const CPP_KEYWORDS = { keyword: 'int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof ' + 'dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace ' + @@ -62892,27 +64531,19 @@ function cPlusPlus(hljs) { 'atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long 
atomic_ulong atomic_llong ' + 'atomic_ullong new throw return ' + 'and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq', - built_in: '_Bool _Complex _Imaginary', - _relevance_hints: COMMON_CPP_HINTS, + built_in: 'std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream ' + + 'auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set ' + + 'unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos ' + + 'asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp ' + + 'fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper ' + + 'isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow ' + + 'printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp ' + + 'strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan ' + + 'vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary', literal: 'true false nullptr NULL' }; - const FUNCTION_DISPATCH = { - className: "function.dispatch", - relevance: 0, - keywords: CPP_KEYWORDS, - begin: concat( - /\b/, - /(?!decltype)/, - /(?!if)/, - /(?!for)/, - /(?!while)/, - hljs.IDENT_RE, - lookahead(/\s*\(/)) - }; - const EXPRESSION_CONTAINS = [ - FUNCTION_DISPATCH, PREPROCESSOR, CPP_PRIMITIVE_TYPES, C_LINE_COMMENT_MODE, @@ -62921,7 +64552,6 @@ function cPlusPlus(hljs) { STRINGS ]; - const EXPRESSION_CONTEXT = { // This mode covers expression context where we can't expect a function // definition and shouldn't highlight anything that looks like one: @@ -62973,21 +64603,6 @@ function cPlusPlus(hljs) { contains: [ TITLE_MODE ], relevance: 0 }, - // needed because we do not have look-behind on the below rule - // to prevent it from grabbing the final : in a :: pair - { - begin: /::/, - relevance: 0 - }, - // initializers - { - begin: /:/, - endsWithParent: true, - contains: [ - STRINGS, - NUMBERS - ] - }, { className: 'params', begin: /\(/, @@ -63037,13 +64652,9 @@ function cPlusPlus(hljs) { ], keywords: CPP_KEYWORDS, illegal: '/ + begin: /<.*?>/, + end: /$/, + illegal: '\\n' }, C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE @@ -64967,122 +66553,6 @@ function cPlusPlus(hljs) { const FUNCTION_TITLE = optional(NAMESPACE_RE) + hljs.IDENT_RE + '\\s*\\('; - const COMMON_CPP_HINTS = [ - 'asin', - 'atan2', - 'atan', - 'calloc', - 'ceil', - 'cosh', - 'cos', - 'exit', - 'exp', - 'fabs', - 'floor', - 'fmod', - 'fprintf', - 'fputs', - 'free', - 'frexp', - 'auto_ptr', - 'deque', - 'list', - 'queue', - 'stack', - 'vector', - 'map', - 'set', - 'pair', - 'bitset', - 'multiset', - 'multimap', - 'unordered_set', - 'fscanf', - 'future', - 'isalnum', - 'isalpha', - 'iscntrl', - 'isdigit', - 'isgraph', - 'islower', - 'isprint', - 'ispunct', - 'isspace', - 'isupper', - 'isxdigit', - 'tolower', - 'toupper', - 'labs', - 'ldexp', - 'log10', - 'log', - 'malloc', - 'realloc', - 'memchr', - 'memcmp', - 'memcpy', - 'memset', - 'modf', - 'pow', - 'printf', - 'putchar', - 'puts', - 'scanf', - 'sinh', - 'sin', - 'snprintf', - 'sprintf', - 'sqrt', - 'sscanf', - 'strcat', - 'strchr', - 'strcmp', - 'strcpy', - 'strcspn', - 'strlen', - 'strncat', - 'strncmp', - 'strncpy', - 'strpbrk', - 'strrchr', - 'strspn', - 'strstr', - 'tanh', - 'tan', - 'unordered_map', - 'unordered_multiset', - 'unordered_multimap', - 'priority_queue', - 'make_pair', - 'array', - 
'shared_ptr', - 'abort', - 'terminate', - 'abs', - 'acos', - 'vfprintf', - 'vprintf', - 'vsprintf', - 'endl', - 'initializer_list', - 'unique_ptr', - 'complex', - 'imaginary', - 'std', - 'string', - 'wstring', - 'cin', - 'cout', - 'cerr', - 'clog', - 'stdin', - 'stdout', - 'stderr', - 'stringstream', - 'istringstream', - 'ostringstream' - ]; - const CPP_KEYWORDS = { keyword: 'int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof ' + 'dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace ' + @@ -65096,27 +66566,19 @@ function cPlusPlus(hljs) { 'atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong ' + 'atomic_ullong new throw return ' + 'and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq', - built_in: '_Bool _Complex _Imaginary', - _relevance_hints: COMMON_CPP_HINTS, + built_in: 'std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream ' + + 'auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set ' + + 'unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos ' + + 'asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp ' + + 'fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper ' + + 'isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow ' + + 'printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp ' + + 'strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan ' + + 'vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary', literal: 'true false nullptr NULL' }; - const FUNCTION_DISPATCH = { - className: "function.dispatch", - relevance: 0, - keywords: CPP_KEYWORDS, - begin: concat( - /\b/, - /(?!decltype)/, - /(?!if)/, - /(?!for)/, - /(?!while)/, - hljs.IDENT_RE, - lookahead(/\s*\(/)) - }; - const EXPRESSION_CONTAINS = [ - FUNCTION_DISPATCH, PREPROCESSOR, CPP_PRIMITIVE_TYPES, C_LINE_COMMENT_MODE, @@ -65125,7 +66587,6 @@ function cPlusPlus(hljs) { STRINGS ]; - const EXPRESSION_CONTEXT = { // This mode covers expression context where we can't expect a function // definition and shouldn't highlight anything that looks like one: @@ -65177,21 +66638,6 @@ function cPlusPlus(hljs) { contains: [ TITLE_MODE ], relevance: 0 }, - // needed because we do not have look-behind on the below rule - // to prevent it from grabbing the final : in a :: pair - { - begin: /::/, - relevance: 0 - }, - // initializers - { - begin: /:/, - endsWithParent: true, - contains: [ - STRINGS, - NUMBERS - ] - }, { className: 'params', begin: /\(/, @@ -65241,13 +66687,9 @@ function cPlusPlus(hljs) { ], keywords: CPP_KEYWORDS, illegal: '/ + begin: /<.*?>/, + end: /$/, + illegal: '\\n' }, C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE @@ -65591,6 +67035,7 @@ function c(hljs) { return { name: "C", aliases: [ + 'c', 'h' ], keywords: CPP_KEYWORDS, @@ -65928,6 +67373,7 @@ function clean(hljs) { return { name: 'Clean', aliases: [ + 'clean', 'icl', 'dcl' ], @@ -66316,10 +67762,7 @@ const TYPES = [ "Array", "Uint8Array", "Uint8ClampedArray", - "ArrayBuffer", - "BigInt64Array", - "BigUint64Array", - "BigInt" + "ArrayBuffer" ]; const ERROR_TYPES = [ @@ -66759,6 +68202,7 @@ function cos(hljs) { name: 'Caché Object Script', case_insensitive: true, 
aliases: [ + "cos", "cls" ], keywords: COS_KEYWORDS, @@ -66843,14 +68287,6 @@ function source(re) { return re.source; } -/** - * @param {RegExp | string } re - * @returns {string} - */ -function lookahead(re) { - return concat('(?=', re, ')'); -} - /** * @param {RegExp | string } re * @returns {string} @@ -66958,7 +68394,9 @@ function cpp(hljs) { }), { className: 'meta-string', - begin: /<.*?>/ + begin: /<.*?>/, + end: /$/, + illegal: '\\n' }, C_LINE_COMMENT_MODE, hljs.C_BLOCK_COMMENT_MODE @@ -66973,122 +68411,6 @@ function cpp(hljs) { const FUNCTION_TITLE = optional(NAMESPACE_RE) + hljs.IDENT_RE + '\\s*\\('; - const COMMON_CPP_HINTS = [ - 'asin', - 'atan2', - 'atan', - 'calloc', - 'ceil', - 'cosh', - 'cos', - 'exit', - 'exp', - 'fabs', - 'floor', - 'fmod', - 'fprintf', - 'fputs', - 'free', - 'frexp', - 'auto_ptr', - 'deque', - 'list', - 'queue', - 'stack', - 'vector', - 'map', - 'set', - 'pair', - 'bitset', - 'multiset', - 'multimap', - 'unordered_set', - 'fscanf', - 'future', - 'isalnum', - 'isalpha', - 'iscntrl', - 'isdigit', - 'isgraph', - 'islower', - 'isprint', - 'ispunct', - 'isspace', - 'isupper', - 'isxdigit', - 'tolower', - 'toupper', - 'labs', - 'ldexp', - 'log10', - 'log', - 'malloc', - 'realloc', - 'memchr', - 'memcmp', - 'memcpy', - 'memset', - 'modf', - 'pow', - 'printf', - 'putchar', - 'puts', - 'scanf', - 'sinh', - 'sin', - 'snprintf', - 'sprintf', - 'sqrt', - 'sscanf', - 'strcat', - 'strchr', - 'strcmp', - 'strcpy', - 'strcspn', - 'strlen', - 'strncat', - 'strncmp', - 'strncpy', - 'strpbrk', - 'strrchr', - 'strspn', - 'strstr', - 'tanh', - 'tan', - 'unordered_map', - 'unordered_multiset', - 'unordered_multimap', - 'priority_queue', - 'make_pair', - 'array', - 'shared_ptr', - 'abort', - 'terminate', - 'abs', - 'acos', - 'vfprintf', - 'vprintf', - 'vsprintf', - 'endl', - 'initializer_list', - 'unique_ptr', - 'complex', - 'imaginary', - 'std', - 'string', - 'wstring', - 'cin', - 'cout', - 'cerr', - 'clog', - 'stdin', - 'stdout', - 'stderr', - 'stringstream', - 'istringstream', - 'ostringstream' - ]; - const CPP_KEYWORDS = { keyword: 'int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof ' + 'dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace ' + @@ -67102,27 +68424,19 @@ function cpp(hljs) { 'atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong ' + 'atomic_ullong new throw return ' + 'and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq', - built_in: '_Bool _Complex _Imaginary', - _relevance_hints: COMMON_CPP_HINTS, + built_in: 'std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream ' + + 'auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set ' + + 'unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos ' + + 'asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp ' + + 'fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper ' + + 'isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow ' + + 'printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp ' + + 'strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan ' + + 'vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary', literal: 
'true false nullptr NULL' }; - const FUNCTION_DISPATCH = { - className: "function.dispatch", - relevance: 0, - keywords: CPP_KEYWORDS, - begin: concat( - /\b/, - /(?!decltype)/, - /(?!if)/, - /(?!for)/, - /(?!while)/, - hljs.IDENT_RE, - lookahead(/\s*\(/)) - }; - const EXPRESSION_CONTAINS = [ - FUNCTION_DISPATCH, PREPROCESSOR, CPP_PRIMITIVE_TYPES, C_LINE_COMMENT_MODE, @@ -67131,7 +68445,6 @@ function cpp(hljs) { STRINGS ]; - const EXPRESSION_CONTEXT = { // This mode covers expression context where we can't expect a function // definition and shouldn't highlight anything that looks like one: @@ -67183,21 +68496,6 @@ function cpp(hljs) { contains: [ TITLE_MODE ], relevance: 0 }, - // needed because we do not have look-behind on the below rule - // to prevent it from grabbing the final : in a :: pair - { - begin: /::/, - relevance: 0 - }, - // initializers - { - begin: /:/, - endsWithParent: true, - contains: [ - STRINGS, - NUMBERS - ] - }, { className: 'params', begin: /\(/, @@ -67247,13 +68545,9 @@ function cpp(hljs) { ], keywords: CPP_KEYWORDS, illegal: '", contains: [ - { - beginKeywords: "in out" - }, + { beginKeywords: "in out"}, TITLE_MODE ] }; - const TYPE_IDENT_RE = hljs.IDENT_RE + '(<' + hljs.IDENT_RE + '(\\s*,\\s*' + hljs.IDENT_RE + ')*>)?(\\[\\])?'; - const AT_IDENTIFIER = { + var TYPE_IDENT_RE = hljs.IDENT_RE + '(<' + hljs.IDENT_RE + '(\\s*,\\s*' + hljs.IDENT_RE + ')*>)?(\\[\\])?'; + var AT_IDENTIFIER = { // prevents expressions like `@class` from incorrect flagging // `class` as a keyword begin: "@" + hljs.IDENT_RE, @@ -68032,10 +69271,7 @@ function csharp(hljs) { return { name: 'C#', - aliases: [ - 'cs', - 'c#' - ], + aliases: ['cs', 'c#'], keywords: KEYWORDS, illegal: /::/, contains: [ @@ -68049,15 +69285,13 @@ function csharp(hljs) { className: 'doctag', variants: [ { - begin: '///', - relevance: 0 + begin: '///', relevance: 0 }, { begin: '' }, { - begin: '' + begin: '' } ] } @@ -68068,8 +69302,7 @@ function csharp(hljs) { hljs.C_BLOCK_COMMENT_MODE, { className: 'meta', - begin: '#', - end: '$', + begin: '#', end: '$', keywords: { 'meta-keyword': 'if else elif endif define undef warning error line region endregion pragma checksum' } @@ -68082,9 +69315,7 @@ function csharp(hljs) { end: /[{;=]/, illegal: /[^\s:,]/, contains: [ - { - beginKeywords: "where class" - }, + { beginKeywords: "where class" }, TITLE_MODE, GENERIC_MODIFIER, hljs.C_LINE_COMMENT_MODE, @@ -68117,16 +69348,9 @@ function csharp(hljs) { { // [Attributes("")] className: 'meta', - begin: '^\\s*\\[', - excludeBegin: true, - end: '\\]', - excludeEnd: true, + begin: '^\\s*\\[', excludeBegin: true, end: '\\]', excludeEnd: true, contains: [ - { - className: 'meta-string', - begin: /"/, - end: /"/ - } + {className: 'meta-string', begin: /"/, end: /"/} ] }, { @@ -68137,10 +69361,8 @@ function csharp(hljs) { }, { className: 'function', - begin: '(' + TYPE_IDENT_RE + '\\s+)+' + hljs.IDENT_RE + '\\s*(<.+>\\s*)?\\(', - returnBegin: true, - end: /\s*[{;=]/, - excludeEnd: true, + begin: '(' + TYPE_IDENT_RE + '\\s+)+' + hljs.IDENT_RE + '\\s*(<.+>\\s*)?\\(', returnBegin: true, + end: /\s*[{;=]/, excludeEnd: true, keywords: KEYWORDS, contains: [ // prevents these from being highlighted `title` @@ -68149,8 +69371,7 @@ function csharp(hljs) { relevance: 0 }, { - begin: hljs.IDENT_RE + '\\s*(<.+>\\s*)?\\(', - returnBegin: true, + begin: hljs.IDENT_RE + '\\s*(<.+>\\s*)?\\(', returnBegin: true, contains: [ hljs.TITLE_MODE, GENERIC_MODIFIER @@ -68159,8 +69380,7 @@ function csharp(hljs) { }, { className: 'params', - begin: /\(/, - 
end: /\)/, + begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, keywords: KEYWORDS, @@ -68551,7 +69771,6 @@ const ATTRIBUTES = [ 'font-language-override', 'font-size', 'font-size-adjust', - 'font-smoothing', 'font-stretch', 'font-style', 'font-variant', @@ -72164,17 +73383,14 @@ function gml(hljs) { const GML_KEYWORDS = { keyword: 'begin end if then else while do for break continue with until ' + 'repeat exit and or xor not return mod div switch case default var ' + - 'globalvar enum function constructor delete #macro #region #endregion', - built_in: 'is_real is_string is_array is_undefined is_int32 is_int64 is_ptr ' + - 'is_vec3 is_vec4 is_matrix is_bool is_method is_struct is_infinity is_nan ' + - 'is_numeric typeof variable_global_exists variable_global_get variable_global_set ' + + 'globalvar enum #macro #region #endregion', + built_in: 'is_real is_string is_array is_undefined is_int32 is_int64 ' + + 'is_ptr is_vec3 is_vec4 is_matrix is_bool typeof ' + + 'variable_global_exists variable_global_get variable_global_set ' + 'variable_instance_exists variable_instance_get variable_instance_set ' + - 'variable_instance_get_names variable_struct_exists variable_struct_get ' + - 'variable_struct_get_names variable_struct_names_count variable_struct_remove ' + - 'variable_struct_set array_delete array_insert array_length array_length_1d ' + - 'array_length_2d array_height_2d array_equals array_create ' + - 'array_copy array_pop array_push array_resize array_sort ' + - 'random random_range irandom irandom_range random_set_seed random_get_seed ' + + 'variable_instance_get_names array_length_1d array_length_2d ' + + 'array_height_2d array_equals array_create array_copy random ' + + 'random_range irandom irandom_range random_set_seed random_get_seed ' + 'randomize randomise choose abs round floor ceil sign frac sqrt sqr ' + 'exp ln log2 log10 sin cos tan arcsin arccos arctan arctan2 dsin dcos ' + 'dtan darcsin darccos darctan darctan2 degtorad radtodeg power logn ' + @@ -73023,6 +74239,10 @@ function gml(hljs) { return { name: 'GML', + aliases: [ + 'gml', + 'GML' + ], case_insensitive: false, // language is case-insensitive keywords: GML_KEYWORDS, @@ -74663,25 +75883,24 @@ Website: https://developer.mozilla.org/en-US/docs/Web/HTTP/Overview function http(hljs) { const VERSION = 'HTTP/(2|1\\.[01])'; const HEADER_NAME = /[A-Za-z][A-Za-z0-9-]*/; - const HEADER = { - className: 'attribute', - begin: concat('^', HEADER_NAME, '(?=\\:\\s)'), - starts: { - contains: [ - { - className: "punctuation", - begin: /: /, - relevance: 0, - starts: { - end: '$', - relevance: 0 - } - } - ] - } - }; const HEADERS_AND_BODY = [ - HEADER, + { + className: 'attribute', + begin: concat('^', HEADER_NAME, '(?=\\:\\s)'), + starts: { + contains: [ + { + className: "punctuation", + begin: /: /, + relevance: 0, + starts: { + end: '$', + relevance: 0 + } + } + ] + } + }, { begin: '\\n\\n', starts: { subLanguage: [], endsWithParent: true } @@ -74738,11 +75957,7 @@ function http(hljs) { illegal: /\S/, contains: HEADERS_AND_BODY } - }, - // to allow headers to work even without a preamble - hljs.inherit(HEADER, { - relevance: 0 - }) + } ] }; } @@ -78467,6 +79682,7 @@ function isbl(hljs) { return { name: 'ISBL', + aliases: ["isbl"], case_insensitive: true, keywords: KEYWORDS, illegal: "\\$|\\?|%|,|;$|~|#|@| // 'array abstract and as binary bool boolean break callable case catch class clone const continue declare ' + - 'default do double else elseif empty enddeclare endfor endforeach endif endswitch endwhile enum eval 
extends ' + + 'default do double else elseif empty enddeclare endfor endforeach endif endswitch endwhile eval extends ' + 'final finally float for foreach from global goto if implements instanceof insteadof int integer interface ' + - 'isset iterable list match|0 mixed new object or private protected public real return string switch throw trait ' + + 'isset iterable list match|0 new object or private protected public real return string switch throw trait ' + 'try unset use var void while xor yield', literal: 'false null true', built_in: // Standard PHP library: // 'Error|0 ' + // error is too common a name esp since PHP is case in-sensitive - 'AppendIterator ArgumentCountError ArithmeticError ArrayIterator ArrayObject AssertionError BadFunctionCallException BadMethodCallException CachingIterator CallbackFilterIterator CompileError Countable DirectoryIterator DivisionByZeroError DomainException EmptyIterator ErrorException Exception FilesystemIterator FilterIterator GlobIterator InfiniteIterator InvalidArgumentException IteratorIterator LengthException LimitIterator LogicException MultipleIterator NoRewindIterator OutOfBoundsException OutOfRangeException OuterIterator OverflowException ParentIterator ParseError RangeException RecursiveArrayIterator RecursiveCachingIterator RecursiveCallbackFilterIterator RecursiveDirectoryIterator RecursiveFilterIterator RecursiveIterator RecursiveIteratorIterator RecursiveRegexIterator RecursiveTreeIterator RegexIterator RuntimeException SeekableIterator SplDoublyLinkedList SplFileInfo SplFileObject SplFixedArray SplHeap SplMaxHeap SplMinHeap SplObjectStorage SplObserver SplObserver SplPriorityQueue SplQueue SplStack SplSubject SplSubject SplTempFileObject TypeError UnderflowException UnexpectedValueException UnhandledMatchError ' + + 'AppendIterator ArgumentCountError ArithmeticError ArrayIterator ArrayObject AssertionError BadFunctionCallException BadMethodCallException CachingIterator CallbackFilterIterator CompileError Countable DirectoryIterator DivisionByZeroError DomainException EmptyIterator ErrorException Exception FilesystemIterator FilterIterator GlobIterator InfiniteIterator InvalidArgumentException IteratorIterator LengthException LimitIterator LogicException MultipleIterator NoRewindIterator OutOfBoundsException OutOfRangeException OuterIterator OverflowException ParentIterator ParseError RangeException RecursiveArrayIterator RecursiveCachingIterator RecursiveCallbackFilterIterator RecursiveDirectoryIterator RecursiveFilterIterator RecursiveIterator RecursiveIteratorIterator RecursiveRegexIterator RecursiveTreeIterator RegexIterator RuntimeException SeekableIterator SplDoublyLinkedList SplFileInfo SplFileObject SplFixedArray SplHeap SplMaxHeap SplMinHeap SplObjectStorage SplObserver SplObserver SplPriorityQueue SplQueue SplStack SplSubject SplSubject SplTempFileObject TypeError UnderflowException UnexpectedValueException ' + // Reserved interfaces: // - 'ArrayAccess Closure Generator Iterator IteratorAggregate Serializable Stringable Throwable Traversable WeakReference WeakMap ' + + 'ArrayAccess Closure Generator Iterator IteratorAggregate Serializable Throwable Traversable WeakReference ' + // Reserved classes: // 'Directory __PHP_Incomplete_Class parent php_user_filter self static stdClass' }; return { - aliases: ['php3', 'php4', 'php5', 'php6', 'php7', 'php8'], + aliases: ['php', 'php3', 'php4', 'php5', 'php6', 'php7', 'php8'], case_insensitive: true, keywords: KEYWORDS, contains: [ @@ -93491,13 +94691,9 @@ function php(hljs) { 
beginKeywords: 'fn function', end: /[;{]/, excludeEnd: true, illegal: '[$%\\[]', contains: [ - { - beginKeywords: 'use', - }, hljs.UNDERSCORE_TITLE_MODE, { - begin: '=>', // No markup, just a relevance booster - endsParent: true + begin: '=>' // No markup, just a relevance booster }, { className: 'params', @@ -93517,13 +94713,11 @@ function php(hljs) { }, { className: 'class', - variants: [ - { beginKeywords: "enum", illegal: /[($"]/ }, - { beginKeywords: "class interface trait", illegal: /[:($"]/ } - ], + beginKeywords: 'class interface', relevance: 0, end: /\{/, excludeEnd: true, + illegal: /[:($"]/, contains: [ {beginKeywords: 'extends implements'}, hljs.UNDERSCORE_TITLE_MODE @@ -94691,39 +95885,6 @@ module.exports = pythonRepl; /***/ 85371: /***/ ((module) => { -/** - * @param {string} value - * @returns {RegExp} - * */ - -/** - * @param {RegExp | string } re - * @returns {string} - */ -function source(re) { - if (!re) return null; - if (typeof re === "string") return re; - - return re.source; -} - -/** - * @param {RegExp | string } re - * @returns {string} - */ -function lookahead(re) { - return concat('(?=', re, ')'); -} - -/** - * @param {...(RegExp | string) } args - * @returns {string} - */ -function concat(...args) { - const joined = args.map((x) => source(x)).join(""); - return joined; -} - /* Language: Python Description: Python is an interpreted, object-oriented, high-level programming language with dynamic semantics. @@ -94748,6 +95909,7 @@ function python(hljs) { 'except', 'finally', 'for', + '', 'from', 'global', 'if', @@ -94764,7 +95926,7 @@ function python(hljs) { 'try', 'while', 'with', - 'yield' + 'yield', ]; const BUILT_INS = [ @@ -94836,7 +95998,7 @@ function python(hljs) { 'tuple', 'type', 'vars', - 'zip' + 'zip', ]; const LITERALS = [ @@ -94845,45 +96007,22 @@ function python(hljs) { 'False', 'None', 'NotImplemented', - 'True' - ]; - - // https://docs.python.org/3/library/typing.html - // TODO: Could these be supplemented by a CamelCase matcher in certain - // contexts, leaving these remaining only for relevance hinting? - const TYPES = [ - "Any", - "Callable", - "Coroutine", - "Dict", - "List", - "Literal", - "Generic", - "Optional", - "Sequence", - "Set", - "Tuple", - "Type", - "Union" + 'True', ]; const KEYWORDS = { - $pattern: /[A-Za-z]\w+|__\w+__/, keyword: RESERVED_WORDS, built_in: BUILT_INS, - literal: LITERALS, - type: TYPES + literal: LITERALS }; const PROMPT = { - className: 'meta', - begin: /^(>>>|\.\.\.) / + className: 'meta', begin: /^(>>>|\.\.\.) 
/ }; const SUBST = { className: 'subst', - begin: /\{/, - end: /\}/, + begin: /\{/, end: /\}/, keywords: KEYWORDS, illegal: /#/ }; @@ -94895,81 +96034,47 @@ function python(hljs) { const STRING = { className: 'string', - contains: [ hljs.BACKSLASH_ESCAPE ], + contains: [hljs.BACKSLASH_ESCAPE], variants: [ { - begin: /([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?'''/, - end: /'''/, - contains: [ - hljs.BACKSLASH_ESCAPE, - PROMPT - ], + begin: /([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?'''/, end: /'''/, + contains: [hljs.BACKSLASH_ESCAPE, PROMPT], relevance: 10 }, { - begin: /([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?"""/, - end: /"""/, - contains: [ - hljs.BACKSLASH_ESCAPE, - PROMPT - ], + begin: /([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?"""/, end: /"""/, + contains: [hljs.BACKSLASH_ESCAPE, PROMPT], relevance: 10 }, { - begin: /([fF][rR]|[rR][fF]|[fF])'''/, - end: /'''/, - contains: [ - hljs.BACKSLASH_ESCAPE, - PROMPT, - LITERAL_BRACKET, - SUBST - ] + begin: /([fF][rR]|[rR][fF]|[fF])'''/, end: /'''/, + contains: [hljs.BACKSLASH_ESCAPE, PROMPT, LITERAL_BRACKET, SUBST] }, { - begin: /([fF][rR]|[rR][fF]|[fF])"""/, - end: /"""/, - contains: [ - hljs.BACKSLASH_ESCAPE, - PROMPT, - LITERAL_BRACKET, - SUBST - ] + begin: /([fF][rR]|[rR][fF]|[fF])"""/, end: /"""/, + contains: [hljs.BACKSLASH_ESCAPE, PROMPT, LITERAL_BRACKET, SUBST] }, { - begin: /([uU]|[rR])'/, - end: /'/, + begin: /([uU]|[rR])'/, end: /'/, relevance: 10 }, { - begin: /([uU]|[rR])"/, - end: /"/, + begin: /([uU]|[rR])"/, end: /"/, relevance: 10 }, { - begin: /([bB]|[bB][rR]|[rR][bB])'/, - end: /'/ + begin: /([bB]|[bB][rR]|[rR][bB])'/, end: /'/ }, { - begin: /([bB]|[bB][rR]|[rR][bB])"/, - end: /"/ + begin: /([bB]|[bB][rR]|[rR][bB])"/, end: /"/ }, { - begin: /([fF][rR]|[rR][fF]|[fF])'/, - end: /'/, - contains: [ - hljs.BACKSLASH_ESCAPE, - LITERAL_BRACKET, - SUBST - ] + begin: /([fF][rR]|[rR][fF]|[fF])'/, end: /'/, + contains: [hljs.BACKSLASH_ESCAPE, LITERAL_BRACKET, SUBST] }, { - begin: /([fF][rR]|[rR][fF]|[fF])"/, - end: /"/, - contains: [ - hljs.BACKSLASH_ESCAPE, - LITERAL_BRACKET, - SUBST - ] + begin: /([fF][rR]|[rR][fF]|[fF])"/, end: /"/, + contains: [hljs.BACKSLASH_ESCAPE, LITERAL_BRACKET, SUBST] }, hljs.APOS_STRING_MODE, hljs.QUOTE_STRING_MODE @@ -94980,8 +96085,7 @@ function python(hljs) { const digitpart = '[0-9](_?[0-9])*'; const pointfloat = `(\\b(${digitpart}))?\\.(${digitpart})|\\b(${digitpart})\\.`; const NUMBER = { - className: 'number', - relevance: 0, + className: 'number', relevance: 0, variants: [ // exponentfloat, pointfloat // https://docs.python.org/3.9/reference/lexical_analysis.html#floating-point-literals @@ -94993,12 +96097,8 @@ function python(hljs) { // and we don't want to mishandle e.g. 
`0..hex()`; this should be safe // because both MUST contain a decimal point and so cannot be confused with // the interior part of an identifier - { - begin: `(\\b(${digitpart})|(${pointfloat}))[eE][+-]?(${digitpart})[jJ]?\\b` - }, - { - begin: `(${pointfloat})[jJ]?` - }, + { begin: `(\\b(${digitpart})|(${pointfloat}))[eE][+-]?(${digitpart})[jJ]?\\b` }, + { begin: `(${pointfloat})[jJ]?` }, // decinteger, bininteger, octinteger, hexinteger // https://docs.python.org/3.9/reference/lexical_analysis.html#integer-literals @@ -95006,109 +96106,49 @@ function python(hljs) { // https://docs.python.org/2.7/reference/lexical_analysis.html#integer-and-long-integer-literals // decinteger is optionally imaginary // https://docs.python.org/3.9/reference/lexical_analysis.html#imaginary-literals - { - begin: '\\b([1-9](_?[0-9])*|0+(_?0)*)[lLjJ]?\\b' - }, - { - begin: '\\b0[bB](_?[01])+[lL]?\\b' - }, - { - begin: '\\b0[oO](_?[0-7])+[lL]?\\b' - }, - { - begin: '\\b0[xX](_?[0-9a-fA-F])+[lL]?\\b' - }, + { begin: '\\b([1-9](_?[0-9])*|0+(_?0)*)[lLjJ]?\\b' }, + { begin: '\\b0[bB](_?[01])+[lL]?\\b' }, + { begin: '\\b0[oO](_?[0-7])+[lL]?\\b' }, + { begin: '\\b0[xX](_?[0-9a-fA-F])+[lL]?\\b' }, // imagnumber (digitpart-based) // https://docs.python.org/3.9/reference/lexical_analysis.html#imaginary-literals - { - begin: `\\b(${digitpart})[jJ]\\b` - } - ] - }; - const COMMENT_TYPE = { - className: "comment", - begin: lookahead(/# type:/), - end: /$/, - keywords: KEYWORDS, - contains: [ - { // prevent keywords from coloring `type` - begin: /# type:/ - }, - // comment within a datatype comment includes no keywords - { - begin: /#/, - end: /\b\B/, - endsWithParent: true - } + { begin: `\\b(${digitpart})[jJ]\\b` }, ] }; + const PARAMS = { className: 'params', variants: [ - // Exclude params in functions without params + // Exclude params at functions without params + {begin: /\(\s*\)/, skip: true, className: null }, { - className: "", - begin: /\(\s*\)/, - skip: true - }, - { - begin: /\(/, - end: /\)/, - excludeBegin: true, - excludeEnd: true, + begin: /\(/, end: /\)/, excludeBegin: true, excludeEnd: true, keywords: KEYWORDS, - contains: [ - 'self', - PROMPT, - NUMBER, - STRING, - hljs.HASH_COMMENT_MODE - ] - } - ] + contains: ['self', PROMPT, NUMBER, STRING, hljs.HASH_COMMENT_MODE], + }, + ], }; - SUBST.contains = [ - STRING, - NUMBER, - PROMPT - ]; + SUBST.contains = [STRING, NUMBER, PROMPT]; return { name: 'Python', - aliases: [ - 'py', - 'gyp', - 'ipython' - ], + aliases: ['py', 'gyp', 'ipython'], keywords: KEYWORDS, illegal: /(<\/|->|\?)|=>/, contains: [ PROMPT, NUMBER, - { - // very common convention - begin: /\bself\b/ - }, - { - // eat "if" prior to string so that it won't accidentally be - // labeled as an f-string - beginKeywords: "if", - relevance: 0 - }, + // eat "if" prior to string so that it won't accidentally be + // labeled as an f-string as in: + { begin: /\bself\b/, }, // very common convention + { beginKeywords: "if", relevance: 0 }, STRING, - COMMENT_TYPE, hljs.HASH_COMMENT_MODE, { variants: [ - { - className: 'function', - beginKeywords: 'def' - }, - { - className: 'class', - beginKeywords: 'class' - } + {className: 'function', beginKeywords: 'def'}, + {className: 'class', beginKeywords: 'class'} ], end: /:/, illegal: /[${=;\n,]/, @@ -95116,21 +96156,18 @@ function python(hljs) { hljs.UNDERSCORE_TITLE_MODE, PARAMS, { - begin: /->/, - endsWithParent: true, - keywords: KEYWORDS + begin: /->/, endsWithParent: true, + keywords: 'None' } ] }, { className: 'meta', - begin: /^[\t ]*@/, - end: /(?=#)|$/, - 
contains: [ - NUMBER, - PARAMS, - STRING - ] + begin: /^[\t ]*@/, end: /(?=#)|$/, + contains: [NUMBER, PARAMS, STRING] + }, + { + begin: /\b(print|exec)\(/ // don’t highlight keywords-turned-functions in Python 3 } ] }; @@ -96182,6 +97219,7 @@ function routeros(hljs) { return { name: 'Microtik RouterOS script', aliases: [ + 'routeros', 'mikrotik' ], case_insensitive: true, @@ -96604,7 +97642,7 @@ function ruby(hljs) { // def method_name( // def method_name; // def method_name (end of line) - begin: concat(/def\s+/, lookahead(RUBY_METHOD_RE + "\\s*(\\(|;|$)")), + begin: concat(/def\s*/, lookahead(RUBY_METHOD_RE + "\\s*(\\(|;|$)")), relevance: 0, // relevance comes from kewords keywords: "def", end: '$|;', @@ -97067,6 +98105,10 @@ function sas(hljs) { return { name: 'SAS', + aliases: [ + 'sas', + 'SAS' + ], case_insensitive: true, // SAS is case-insensitive keywords: { literal: @@ -97886,7 +98928,6 @@ const ATTRIBUTES = [ 'font-language-override', 'font-size', 'font-size-adjust', - 'font-smoothing', 'font-stretch', 'font-style', 'font-variant', @@ -98229,6 +99270,7 @@ function smali(hljs) { ]; return { name: 'Smali', + aliases: [ 'smali' ], contains: [ { className: 'string', @@ -98534,6 +99576,7 @@ function sqf(hljs) { return { name: 'SQF', + aliases: [ 'sqf' ], case_insensitive: true, keywords: { keyword: @@ -100821,7 +101864,6 @@ const ATTRIBUTES = [ 'font-language-override', 'font-size', 'font-size-adjust', - 'font-smoothing', 'font-stretch', 'font-style', 'font-variant', @@ -101251,8 +102293,6 @@ const keywords = [ // will result in additional modes being created to scan for those keywords to // avoid conflicts with other rules 'associatedtype', - 'async', - 'await', /as\?/, // operator /as!/, // operator 'as', // operator @@ -102177,39 +103217,6 @@ module.exports = tap; /***/ 70411: /***/ ((module) => { -/** - * @param {string} value - * @returns {RegExp} - * */ - -/** - * @param {RegExp | string } re - * @returns {string} - */ -function source(re) { - if (!re) return null; - if (typeof re === "string") return re; - - return re.source; -} - -/** - * @param {RegExp | string } re - * @returns {string} - */ -function optional(re) { - return concat('(', re, ')?'); -} - -/** - * @param {...(RegExp | string) } args - * @returns {string} - */ -function concat(...args) { - const joined = args.map((x) => source(x)).join(""); - return joined; -} - /* Language: Tcl Description: Tcl is a very simple programming language. 
@@ -102218,13 +103225,6 @@ Website: https://www.tcl.tk/about/language.html */ function tcl(hljs) { - const TCL_IDENT = /[a-zA-Z_][a-zA-Z0-9_]*/; - - const NUMBER = { - className: 'number', - variants: [hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE] - }; - return { name: 'Tcl', aliases: ['tk'], @@ -102258,24 +103258,15 @@ function tcl(hljs) { ] }, { - className: "variable", + excludeEnd: true, variants: [ { - begin: concat( - /\$/, - optional(/::/), - TCL_IDENT, - '(::', - TCL_IDENT, - ')*' - ) + begin: '\\$(\\{)?(::)?[a-zA-Z_]((::)?[a-zA-Z0-9_])*\\(([a-zA-Z0-9_])*\\)', + end: '[^a-zA-Z0-9_\\}\\$]' }, { - begin: '\\$\\{(::)?[a-zA-Z_]((::)?[a-zA-Z0-9_])*', - end: '\\}', - contains: [ - NUMBER - ] + begin: '\\$(\\{)?(::)?[a-zA-Z_]((::)?[a-zA-Z0-9_])*', + end: '(\\))?[^a-zA-Z0-9_\\}\\$]' } ] }, @@ -102286,7 +103277,10 @@ function tcl(hljs) { hljs.inherit(hljs.QUOTE_STRING_MODE, {illegal: null}) ] }, - NUMBER + { + className: 'number', + variants: [hljs.BINARY_NUMBER_MODE, hljs.C_NUMBER_MODE] + } ] } } @@ -102627,10 +103621,7 @@ const TYPES = [ "Array", "Uint8Array", "Uint8ClampedArray", - "ArrayBuffer", - "BigInt64Array", - "BigUint64Array", - "BigInt" + "ArrayBuffer" ]; const ERROR_TYPES = [ @@ -103233,7 +104224,7 @@ function typescript(hljs) { Object.assign(tsLanguage, { name: 'TypeScript', - aliases: ['ts', 'tsx'] + aliases: ['ts'] }); return tsLanguage; @@ -104575,8 +105566,7 @@ function xml(hljs) { }, { begin: />/, - relevance: 0, - endsParent: true + relevance: 0 } ] } @@ -104966,7 +105956,7 @@ function yaml(hljs) { return { name: 'YAML', case_insensitive: true, - aliases: [ 'yml' ], + aliases: ['yml', 'YAML'], contains: MODES }; } @@ -106212,6 +107202,60 @@ if (typeof Object.create === 'function') { } +/***/ }), + +/***/ 64882: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var numberIsNan = __nccwpck_require__(16325); + +module.exports = function (x) { + if (numberIsNan(x)) { + return false; + } + + // https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1369 + + // code points are derived from: + // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt + if (x >= 0x1100 && ( + x <= 0x115f || // Hangul Jamo + 0x2329 === x || // LEFT-POINTING ANGLE BRACKET + 0x232a === x || // RIGHT-POINTING ANGLE BRACKET + // CJK Radicals Supplement .. Enclosed CJK Letters and Months + (0x2e80 <= x && x <= 0x3247 && x !== 0x303f) || + // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A + 0x3250 <= x && x <= 0x4dbf || + // CJK Unified Ideographs .. Yi Radicals + 0x4e00 <= x && x <= 0xa4c6 || + // Hangul Jamo Extended-A + 0xa960 <= x && x <= 0xa97c || + // Hangul Syllables + 0xac00 <= x && x <= 0xd7a3 || + // CJK Compatibility Ideographs + 0xf900 <= x && x <= 0xfaff || + // Vertical Forms + 0xfe10 <= x && x <= 0xfe19 || + // CJK Compatibility Forms .. Small Form Variants + 0xfe30 <= x && x <= 0xfe6b || + // Halfwidth and Fullwidth Forms + 0xff01 <= x && x <= 0xff60 || + 0xffe0 <= x && x <= 0xffe6 || + // Kana Supplement + 0x1b000 <= x && x <= 0x1b001 || + // Enclosed Ideographic Supplement + 0x1f200 <= x && x <= 0x1f251 || + // CJK Unified Ideographs Extension B .. 
Tertiary Ideographic Plane + 0x20000 <= x && x <= 0x3fffd)) { + return true; + } + + return false; +} + + /***/ }), /***/ 10657: @@ -106315,55 +107359,62 @@ module.exports.isDuplex = isDuplex -var loader = __nccwpck_require__(51161); -var dumper = __nccwpck_require__(68866); +var yaml = __nccwpck_require__(40916); + +module.exports = yaml; -function renamed(from, to) { + +/***/ }), + +/***/ 40916: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + + +var loader = __nccwpck_require__(45190); +var dumper = __nccwpck_require__(73034); + + +function deprecated(name) { return function () { - throw new Error('Function yaml.' + from + ' is removed in js-yaml 4. ' + - 'Use yaml.' + to + ' instead, which is now safe by default.'); + throw new Error('Function ' + name + ' is deprecated and cannot be used.'); }; } -module.exports.Type = __nccwpck_require__(6073); -module.exports.Schema = __nccwpck_require__(21082); -module.exports.FAILSAFE_SCHEMA = __nccwpck_require__(28562); -module.exports.JSON_SCHEMA = __nccwpck_require__(1035); -module.exports.CORE_SCHEMA = __nccwpck_require__(12011); -module.exports.DEFAULT_SCHEMA = __nccwpck_require__(18759); +module.exports.Type = __nccwpck_require__(30967); +module.exports.Schema = __nccwpck_require__(66514); +module.exports.FAILSAFE_SCHEMA = __nccwpck_require__(66037); +module.exports.JSON_SCHEMA = __nccwpck_require__(1571); +module.exports.CORE_SCHEMA = __nccwpck_require__(92183); +module.exports.DEFAULT_SAFE_SCHEMA = __nccwpck_require__(48949); +module.exports.DEFAULT_FULL_SCHEMA = __nccwpck_require__(56874); module.exports.load = loader.load; module.exports.loadAll = loader.loadAll; +module.exports.safeLoad = loader.safeLoad; +module.exports.safeLoadAll = loader.safeLoadAll; module.exports.dump = dumper.dump; -module.exports.YAMLException = __nccwpck_require__(68179); +module.exports.safeDump = dumper.safeDump; +module.exports.YAMLException = __nccwpck_require__(65199); -// Re-export all types in case user wants to create custom schema -module.exports.types = { - binary: __nccwpck_require__(77900), - float: __nccwpck_require__(42705), - map: __nccwpck_require__(86150), - null: __nccwpck_require__(20721), - pairs: __nccwpck_require__(96860), - set: __nccwpck_require__(79548), - timestamp: __nccwpck_require__(99212), - bool: __nccwpck_require__(64993), - int: __nccwpck_require__(11615), - merge: __nccwpck_require__(86104), - omap: __nccwpck_require__(19046), - seq: __nccwpck_require__(67283), - str: __nccwpck_require__(23619) -}; +// Deprecated schema names from JS-YAML 2.0.x +module.exports.MINIMAL_SCHEMA = __nccwpck_require__(66037); +module.exports.SAFE_SCHEMA = __nccwpck_require__(48949); +module.exports.DEFAULT_SCHEMA = __nccwpck_require__(56874); -// Removed functions from JS-YAML 3.0.x -module.exports.safeLoad = renamed('safeLoad', 'load'); -module.exports.safeLoadAll = renamed('safeLoadAll', 'loadAll'); -module.exports.safeDump = renamed('safeDump', 'dump'); +// Deprecated functions from JS-YAML 1.x.x +module.exports.scan = deprecated('scan'); +module.exports.parse = deprecated('parse'); +module.exports.compose = deprecated('compose'); +module.exports.addConstructor = deprecated('addConstructor'); /***/ }), -/***/ 26829: +/***/ 59136: /***/ ((module) => { "use strict"; @@ -106430,7 +107481,7 @@ module.exports.extend = extend; /***/ }), -/***/ 68866: +/***/ 73034: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -106438,14 +107489,14 @@ module.exports.extend = extend; 
/*eslint-disable no-use-before-define*/ -var common = __nccwpck_require__(26829); -var YAMLException = __nccwpck_require__(68179); -var DEFAULT_SCHEMA = __nccwpck_require__(18759); +var common = __nccwpck_require__(59136); +var YAMLException = __nccwpck_require__(65199); +var DEFAULT_FULL_SCHEMA = __nccwpck_require__(56874); +var DEFAULT_SAFE_SCHEMA = __nccwpck_require__(48949); var _toString = Object.prototype.toString; var _hasOwnProperty = Object.prototype.hasOwnProperty; -var CHAR_BOM = 0xFEFF; var CHAR_TAB = 0x09; /* Tab */ var CHAR_LINE_FEED = 0x0A; /* LF */ var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */ @@ -106494,8 +107545,6 @@ var DEPRECATED_BOOLEANS_SYNTAX = [ 'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF' ]; -var DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\.[0-9_]*)?$/; - function compileStyleMap(schema, map) { var result, keys, index, length, tag, style, type; @@ -106544,12 +107593,8 @@ function encodeHex(character) { return '\\' + handle + common.repeat('0', length - string.length) + string; } - -var QUOTING_TYPE_SINGLE = 1, - QUOTING_TYPE_DOUBLE = 2; - function State(options) { - this.schema = options['schema'] || DEFAULT_SCHEMA; + this.schema = options['schema'] || DEFAULT_FULL_SCHEMA; this.indent = Math.max(1, (options['indent'] || 2)); this.noArrayIndent = options['noArrayIndent'] || false; this.skipInvalid = options['skipInvalid'] || false; @@ -106560,9 +107605,6 @@ function State(options) { this.noRefs = options['noRefs'] || false; this.noCompatMode = options['noCompatMode'] || false; this.condenseFlow = options['condenseFlow'] || false; - this.quotingType = options['quotingType'] === '"' ? QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE; - this.forceQuotes = options['forceQuotes'] || false; - this.replacer = typeof options['replacer'] === 'function' ? 
options['replacer'] : null; this.implicitTypes = this.schema.compiledImplicit; this.explicitTypes = this.schema.compiledExplicit; @@ -106631,60 +107673,47 @@ function isWhitespace(c) { function isPrintable(c) { return (0x00020 <= c && c <= 0x00007E) || ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029) - || ((0x0E000 <= c && c <= 0x00FFFD) && c !== CHAR_BOM) + || ((0x0E000 <= c && c <= 0x00FFFD) && c !== 0xFEFF /* BOM */) || (0x10000 <= c && c <= 0x10FFFF); } // [34] ns-char ::= nb-char - s-white // [27] nb-char ::= c-printable - b-char - c-byte-order-mark // [26] b-char ::= b-line-feed | b-carriage-return -// Including s-white (for some reason, examples doesn't match specs in this aspect) -// ns-char ::= c-printable - b-line-feed - b-carriage-return - c-byte-order-mark -function isNsCharOrWhitespace(c) { - return isPrintable(c) - && c !== CHAR_BOM - // - b-char +// [24] b-line-feed ::= #xA /* LF */ +// [25] b-carriage-return ::= #xD /* CR */ +// [3] c-byte-order-mark ::= #xFEFF +function isNsChar(c) { + return isPrintable(c) && !isWhitespace(c) + // byte-order-mark + && c !== 0xFEFF + // b-char && c !== CHAR_CARRIAGE_RETURN && c !== CHAR_LINE_FEED; } -// [127] ns-plain-safe(c) ::= c = flow-out ⇒ ns-plain-safe-out -// c = flow-in ⇒ ns-plain-safe-in -// c = block-key ⇒ ns-plain-safe-out -// c = flow-key ⇒ ns-plain-safe-in -// [128] ns-plain-safe-out ::= ns-char -// [129] ns-plain-safe-in ::= ns-char - c-flow-indicator -// [130] ns-plain-char(c) ::= ( ns-plain-safe(c) - “:” - “#” ) -// | ( /* An ns-char preceding */ “#” ) -// | ( “:” /* Followed by an ns-plain-safe(c) */ ) -function isPlainSafe(c, prev, inblock) { - var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c); - var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c); - return ( - // ns-plain-safe - inblock ? // c = flow-in - cIsNsCharOrWhitespace - : cIsNsCharOrWhitespace - // - c-flow-indicator - && c !== CHAR_COMMA - && c !== CHAR_LEFT_SQUARE_BRACKET - && c !== CHAR_RIGHT_SQUARE_BRACKET - && c !== CHAR_LEFT_CURLY_BRACKET - && c !== CHAR_RIGHT_CURLY_BRACKET - ) - // ns-plain-char - && c !== CHAR_SHARP // false on '#' - && !(prev === CHAR_COLON && !cIsNsChar) // false on ': ' - || (isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP) // change to true on '[^ ]#' - || (prev === CHAR_COLON && cIsNsChar); // change to true on ':[^ ]' +// Simplified test for values allowed after the first character in plain style. +function isPlainSafe(c, prev) { + // Uses a subset of nb-char - c-flow-indicator - ":" - "#" + // where nb-char ::= c-printable - b-char - c-byte-order-mark. + return isPrintable(c) && c !== 0xFEFF + // - c-flow-indicator + && c !== CHAR_COMMA + && c !== CHAR_LEFT_SQUARE_BRACKET + && c !== CHAR_RIGHT_SQUARE_BRACKET + && c !== CHAR_LEFT_CURLY_BRACKET + && c !== CHAR_RIGHT_CURLY_BRACKET + // - ":" - "#" + // /* An ns-char preceding */ "#" + && c !== CHAR_COLON + && ((c !== CHAR_SHARP) || (prev && isNsChar(prev))); } // Simplified test for values allowed as the first character in plain style. function isPlainSafeFirst(c) { // Uses a subset of ns-char - c-indicator // where ns-char = nb-char - s-white. 
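The State() constructor above copies the dumper options by name; a small sketch of how those same names surface through yaml.dump(), assuming the published js-yaml API (the sample object and values are invented for illustration):

// Illustrative only: each option below corresponds to a field the dumper
// State() above reads from its options argument.
const yaml = require('js-yaml');

const out = yaml.dump(
  { name: 'flat-data', ports: [8080, 8081], nested: { a: 1, b: 2 } },
  {
    indent: 2,          // this.indent
    flowLevel: 1,       // this.flowLevel: collections at this depth or deeper use flow style
    sortKeys: true,     // this.sortKeys
    lineWidth: 80,      // this.lineWidth, consulted by chooseScalarStyle()
    noRefs: true,       // this.noRefs: skip &ref_N anchors for duplicate objects
    condenseFlow: false // this.condenseFlow
  }
);
console.log(out);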
- // No support of ( ( “?” | “:” | “-” ) /* Followed by an ns-plain-safe(c)) */ ) part - return isPrintable(c) && c !== CHAR_BOM + return isPrintable(c) && c !== 0xFEFF && !isWhitespace(c) // - s-white // - (c-indicator ::= // “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}” @@ -106712,25 +107741,6 @@ function isPlainSafeFirst(c) { && c !== CHAR_GRAVE_ACCENT; } -// Simplified test for values allowed as the last character in plain style. -function isPlainSafeLast(c) { - // just not whitespace or colon, it will be checked to be plain character later - return !isWhitespace(c) && c !== CHAR_COLON; -} - -// Same as 'string'.codePointAt(pos), but works in older browsers. -function codePointAt(string, pos) { - var first = string.charCodeAt(pos), second; - if (first >= 0xD800 && first <= 0xDBFF && pos + 1 < string.length) { - second = string.charCodeAt(pos + 1); - if (second >= 0xDC00 && second <= 0xDFFF) { - // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae - return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000; - } - } - return first; -} - // Determines whether block indentation indicator is required. function needIndentIndicator(string) { var leadingSpaceRe = /^\n* /; @@ -106750,34 +107760,31 @@ var STYLE_PLAIN = 1, // STYLE_PLAIN or STYLE_SINGLE => no \n are in the string. // STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1). // STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1). -function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, - testAmbiguousType, quotingType, forceQuotes, inblock) { - +function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, testAmbiguousType) { var i; - var char = 0; - var prevChar = null; + var char, prev_char; var hasLineBreak = false; var hasFoldableLine = false; // only checked if shouldTrackWidth var shouldTrackWidth = lineWidth !== -1; var previousLineBreak = -1; // count the first line correctly - var plain = isPlainSafeFirst(codePointAt(string, 0)) - && isPlainSafeLast(codePointAt(string, string.length - 1)); + var plain = isPlainSafeFirst(string.charCodeAt(0)) + && !isWhitespace(string.charCodeAt(string.length - 1)); - if (singleLineOnly || forceQuotes) { + if (singleLineOnly) { // Case: no block styles. // Check for disallowed characters to rule out plain and single. - for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { - char = codePointAt(string, i); + for (i = 0; i < string.length; i++) { + char = string.charCodeAt(i); if (!isPrintable(char)) { return STYLE_DOUBLE; } - plain = plain && isPlainSafe(char, prevChar, inblock); - prevChar = char; + prev_char = i > 0 ? string.charCodeAt(i - 1) : null; + plain = plain && isPlainSafe(char, prev_char); } } else { // Case: block styles permitted. - for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { - char = codePointAt(string, i); + for (i = 0; i < string.length; i++) { + char = string.charCodeAt(i); if (char === CHAR_LINE_FEED) { hasLineBreak = true; // Check if any line can be folded. @@ -106791,8 +107798,8 @@ function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, } else if (!isPrintable(char)) { return STYLE_DOUBLE; } - plain = plain && isPlainSafe(char, prevChar, inblock); - prevChar = char; + prev_char = i > 0 ? 
string.charCodeAt(i - 1) : null; + plain = plain && isPlainSafe(char, prev_char); } // in case the end is missing a \n hasFoldableLine = hasFoldableLine || (shouldTrackWidth && @@ -106805,10 +107812,8 @@ function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, if (!hasLineBreak && !hasFoldableLine) { // Strings interpretable as another type have to be quoted; // e.g. the string 'true' vs. the boolean true. - if (plain && !forceQuotes && !testAmbiguousType(string)) { - return STYLE_PLAIN; - } - return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; + return plain && !testAmbiguousType(string) + ? STYLE_PLAIN : STYLE_SINGLE; } // Edge case: block indentation indicator can only have one digit. if (indentPerLevel > 9 && needIndentIndicator(string)) { @@ -106816,10 +107821,7 @@ function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, } // At this point we know block styles are valid. // Prefer literal style unless we want to fold. - if (!forceQuotes) { - return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; - } - return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; + return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; } // Note: line breaking/folding is implemented for only the folded style. @@ -106828,15 +107830,14 @@ function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, // • No ending newline => unaffected; already using strip "-" chomping. // • Ending newline => removed then restored. // Importantly, this keeps the "+" chomp indicator from gaining an extra line. -function writeScalar(state, string, level, iskey, inblock) { +function writeScalar(state, string, level, iskey) { state.dump = (function () { if (string.length === 0) { - return state.quotingType === QUOTING_TYPE_DOUBLE ? '""' : "''"; + return "''"; } - if (!state.noCompatMode) { - if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) { - return state.quotingType === QUOTING_TYPE_DOUBLE ? ('"' + string + '"') : ("'" + string + "'"); - } + if (!state.noCompatMode && + DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1) { + return "'" + string + "'"; } var indent = state.indent * Math.max(1, level); // no 0-indent scalars @@ -106858,9 +107859,7 @@ function writeScalar(state, string, level, iskey, inblock) { return testImplicitResolving(state, string); } - switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, - testAmbiguity, state.quotingType, state.forceQuotes && !iskey, inblock)) { - + switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, testAmbiguity)) { case STYLE_PLAIN: return string; case STYLE_SINGLE: @@ -106977,19 +107976,25 @@ function foldLine(line, width) { // Escapes a double-quoted string. function escapeString(string) { var result = ''; - var char = 0; + var char, nextChar; var escapeSeq; - for (var i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { - char = codePointAt(string, i); - escapeSeq = ESCAPE_SEQUENCES[char]; - - if (!escapeSeq && isPrintable(char)) { - result += string[i]; - if (char >= 0x10000) result += string[i + 1]; - } else { - result += escapeSeq || encodeHex(char); + for (var i = 0; i < string.length; i++) { + char = string.charCodeAt(i); + // Check for surrogate pairs (reference Unicode 3.0 section "3.7 Surrogates"). 
+ if (char >= 0xD800 && char <= 0xDBFF/* high surrogate */) { + nextChar = string.charCodeAt(i + 1); + if (nextChar >= 0xDC00 && nextChar <= 0xDFFF/* low surrogate */) { + // Combine the surrogate pair and store it escaped. + result += encodeHex((char - 0xD800) * 0x400 + nextChar - 0xDC00 + 0x10000); + // Advance index one extra since we already used that char here. + i++; continue; + } } + escapeSeq = ESCAPE_SEQUENCES[char]; + result += !escapeSeq && isPrintable(char) + ? string[i] + : escapeSeq || encodeHex(char); } return result; @@ -106999,22 +108004,12 @@ function writeFlowSequence(state, level, object) { var _result = '', _tag = state.tag, index, - length, - value; + length; for (index = 0, length = object.length; index < length; index += 1) { - value = object[index]; - - if (state.replacer) { - value = state.replacer.call(object, String(index), value); - } - - // Write only valid elements, put null instead of invalid elements. - if (writeNode(state, level, value, false, false) || - (typeof value === 'undefined' && - writeNode(state, level, null, false, false))) { - - if (_result !== '') _result += ',' + (!state.condenseFlow ? ' ' : ''); + // Write only valid elements. + if (writeNode(state, level, object[index], false, false)) { + if (index !== 0) _result += ',' + (!state.condenseFlow ? ' ' : ''); _result += state.dump; } } @@ -107027,22 +108022,12 @@ function writeBlockSequence(state, level, object, compact) { var _result = '', _tag = state.tag, index, - length, - value; + length; for (index = 0, length = object.length; index < length; index += 1) { - value = object[index]; - - if (state.replacer) { - value = state.replacer.call(object, String(index), value); - } - - // Write only valid elements, put null instead of invalid elements. - if (writeNode(state, level + 1, value, true, true, false, true) || - (typeof value === 'undefined' && - writeNode(state, level + 1, null, true, true, false, true))) { - - if (!compact || _result !== '') { + // Write only valid elements. + if (writeNode(state, level + 1, object[index], true, true)) { + if (!compact || index !== 0) { _result += generateNextLine(state, level); } @@ -107073,17 +108058,13 @@ function writeFlowMapping(state, level, object) { for (index = 0, length = objectKeyList.length; index < length; index += 1) { pairBuffer = ''; - if (_result !== '') pairBuffer += ', '; + if (index !== 0) pairBuffer += ', '; if (state.condenseFlow) pairBuffer += '"'; objectKey = objectKeyList[index]; objectValue = object[objectKey]; - if (state.replacer) { - objectValue = state.replacer.call(object, objectKey, objectValue); - } - if (!writeNode(state, level, objectKey, false, false)) { continue; // Skip this pair because of invalid key; } @@ -107132,17 +108113,13 @@ function writeBlockMapping(state, level, object, compact) { for (index = 0, length = objectKeyList.length; index < length; index += 1) { pairBuffer = ''; - if (!compact || _result !== '') { + if (!compact || index !== 0) { pairBuffer += generateNextLine(state, level); } objectKey = objectKeyList[index]; objectValue = object[objectKey]; - if (state.replacer) { - objectValue = state.replacer.call(object, objectKey, objectValue); - } - if (!writeNode(state, level + 1, objectKey, true, true, true)) { continue; // Skip this pair because of invalid key. 
} @@ -107196,15 +108173,7 @@ function detectType(state, object, explicit) { (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) && (!type.predicate || type.predicate(object))) { - if (explicit) { - if (type.multi && type.representName) { - state.tag = type.representName(object); - } else { - state.tag = type.tag; - } - } else { - state.tag = '?'; - } + state.tag = explicit ? type.tag : '?'; if (type.represent) { style = state.styleMap[type.tag] || type.defaultStyle; @@ -107230,7 +108199,7 @@ function detectType(state, object, explicit) { // Serializes `object` and writes it to global `result`. // Returns true on success, or false on invalid object. // -function writeNode(state, level, object, block, compact, iskey, isblockseq) { +function writeNode(state, level, object, block, compact, iskey) { state.tag = null; state.dump = object; @@ -107239,8 +108208,6 @@ function writeNode(state, level, object, block, compact, iskey, isblockseq) { } var type = _toString.call(state.dump); - var inblock = block; - var tagStr; if (block) { block = (state.flowLevel < 0 || state.flowLevel > level); @@ -107278,59 +108245,29 @@ function writeNode(state, level, object, block, compact, iskey, isblockseq) { } } } else if (type === '[object Array]') { + var arrayLevel = (state.noArrayIndent && (level > 0)) ? level - 1 : level; if (block && (state.dump.length !== 0)) { - if (state.noArrayIndent && !isblockseq && level > 0) { - writeBlockSequence(state, level - 1, state.dump, compact); - } else { - writeBlockSequence(state, level, state.dump, compact); - } + writeBlockSequence(state, arrayLevel, state.dump, compact); if (duplicate) { state.dump = '&ref_' + duplicateIndex + state.dump; } } else { - writeFlowSequence(state, level, state.dump); + writeFlowSequence(state, arrayLevel, state.dump); if (duplicate) { state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; } } } else if (type === '[object String]') { if (state.tag !== '?') { - writeScalar(state, state.dump, level, iskey, inblock); + writeScalar(state, state.dump, level, iskey); } - } else if (type === '[object Undefined]') { - return false; } else { if (state.skipInvalid) return false; throw new YAMLException('unacceptable kind of an object to dump ' + type); } if (state.tag !== null && state.tag !== '?') { - // Need to encode all characters except those allowed by the spec: - // - // [35] ns-dec-digit ::= [#x30-#x39] /* 0-9 */ - // [36] ns-hex-digit ::= ns-dec-digit - // | [#x41-#x46] /* A-F */ | [#x61-#x66] /* a-f */ - // [37] ns-ascii-letter ::= [#x41-#x5A] /* A-Z */ | [#x61-#x7A] /* a-z */ - // [38] ns-word-char ::= ns-dec-digit | ns-ascii-letter | “-” - // [39] ns-uri-char ::= “%” ns-hex-digit ns-hex-digit | ns-word-char | “#” - // | “;” | “/” | “?” | “:” | “@” | “&” | “=” | “+” | “$” | “,” - // | “_” | “.” | “!” | “~” | “*” | “'” | “(” | “)” | “[” | “]” - // - // Also need to encode '!' because it has special meaning (end of tag prefix). - // - tagStr = encodeURI( - state.tag[0] === '!' ? state.tag.slice(1) : state.tag - ).replace(/!/g, '%21'); - - if (state.tag[0] === '!') { - tagStr = '!' + tagStr; - } else if (tagStr.slice(0, 18) === 'tag:yaml.org,2002:') { - tagStr = '!!' 
+ tagStr.slice(18); - } else { - tagStr = '!<' + tagStr + '>'; - } - - state.dump = tagStr + ' ' + state.dump; + state.dump = '!<' + state.tag + '> ' + state.dump; } } @@ -107387,23 +108324,22 @@ function dump(input, options) { if (!state.noRefs) getDuplicateReferences(input, state); - var value = input; - - if (state.replacer) { - value = state.replacer.call({ '': value }, '', value); - } - - if (writeNode(state, 0, value, true, true)) return state.dump + '\n'; + if (writeNode(state, 0, input, true, true)) return state.dump + '\n'; return ''; } -module.exports.dump = dump; +function safeDump(input, options) { + return dump(input, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options)); +} + +module.exports.dump = dump; +module.exports.safeDump = safeDump; /***/ }), -/***/ 68179: +/***/ 65199: /***/ ((module) => { "use strict"; @@ -107411,26 +108347,6 @@ module.exports.dump = dump; // - -function formatError(exception, compact) { - var where = '', message = exception.reason || '(unknown reason)'; - - if (!exception.mark) return message; - - if (exception.mark.name) { - where += 'in "' + exception.mark.name + '" '; - } - - where += '(' + (exception.mark.line + 1) + ':' + (exception.mark.column + 1) + ')'; - - if (!compact && exception.mark.snippet) { - where += '\n\n' + exception.mark.snippet; - } - - return message + ' ' + where; -} - - function YAMLException(reason, mark) { // Super constructor Error.call(this); @@ -107438,7 +108354,7 @@ function YAMLException(reason, mark) { this.name = 'YAMLException'; this.reason = reason; this.mark = mark; - this.message = formatError(this, false); + this.message = (this.reason || '(unknown reason)') + (this.mark ? ' ' + this.mark.toString() : ''); // Include stack trace in error object if (Error.captureStackTrace) { @@ -107457,7 +108373,15 @@ YAMLException.prototype.constructor = YAMLException; YAMLException.prototype.toString = function toString(compact) { - return this.name + ': ' + formatError(this, compact); + var result = this.name + ': '; + + result += this.reason || '(unknown reason)'; + + if (!compact && this.mark) { + result += ' ' + this.mark.toString(); + } + + return result; }; @@ -107466,7 +108390,7 @@ module.exports = YAMLException; /***/ }), -/***/ 51161: +/***/ 45190: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -107474,10 +108398,11 @@ module.exports = YAMLException; /*eslint-disable max-len,no-use-before-define*/ -var common = __nccwpck_require__(26829); -var YAMLException = __nccwpck_require__(68179); -var makeSnippet = __nccwpck_require__(96975); -var DEFAULT_SCHEMA = __nccwpck_require__(18759); +var common = __nccwpck_require__(59136); +var YAMLException = __nccwpck_require__(65199); +var Mark = __nccwpck_require__(55426); +var DEFAULT_SAFE_SCHEMA = __nccwpck_require__(48949); +var DEFAULT_FULL_SCHEMA = __nccwpck_require__(56874); var _hasOwnProperty = Object.prototype.hasOwnProperty; @@ -107604,12 +108529,9 @@ function State(input, options) { this.input = input; this.filename = options['filename'] || null; - this.schema = options['schema'] || DEFAULT_SCHEMA; + this.schema = options['schema'] || DEFAULT_FULL_SCHEMA; this.onWarning = options['onWarning'] || null; - // (Hidden) Remove? 
makes the loader to expect YAML 1.1 documents - // if such documents have no explicit %YAML directive this.legacy = options['legacy'] || false; - this.json = options['json'] || false; this.listener = options['listener'] || null; @@ -107622,10 +108544,6 @@ function State(input, options) { this.lineStart = 0; this.lineIndent = 0; - // position of first leading tab in the current line, - // used to make sure there are no tabs in the indentation - this.firstTabInLine = -1; - this.documents = []; /* @@ -107642,17 +108560,9 @@ function State(input, options) { function generateError(state, message) { - var mark = { - name: state.filename, - buffer: state.input.slice(0, -1), // omit trailing \0 - position: state.position, - line: state.line, - column: state.position - state.lineStart - }; - - mark.snippet = makeSnippet(mark); - - return new YAMLException(message, mark); + return new YAMLException( + message, + new Mark(state.filename, state.input, state.position, state.line, (state.position - state.lineStart))); } function throwError(state, message) { @@ -107724,12 +108634,6 @@ var directiveHandlers = { throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive'); } - try { - prefix = decodeURIComponent(prefix); - } catch (err) { - throwError(state, 'tag prefix is malformed: ' + prefix); - } - state.tagMap[handle] = prefix; } }; @@ -107776,9 +108680,7 @@ function mergeMappings(state, destination, source, overridableKeys) { } } -function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, - startLine, startLineStart, startPos) { - +function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, startLine, startPos) { var index, quantity; // The output is a plain object here, so keys can only be strings. 
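The storeMappingPair hunks that follow are where the loader rejects repeated keys; a short sketch of what that looks like from the caller's side, again assuming the npm js-yaml API (the YAML document is illustrative):

// Illustrative only: the 'duplicated mapping key' error below is raised by
// storeMappingPair() and surfaced through the YAMLException/Mark types in this diff.
const yaml = require('js-yaml');

try {
  yaml.load('key: 1\nkey: 2'); // the same key appears twice in one mapping
} catch (e) {
  console.log(e.name);    // 'YAMLException'
  console.log(e.message); // includes 'duplicated mapping key' plus position info
}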
@@ -107825,22 +108727,10 @@ function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valu !_hasOwnProperty.call(overridableKeys, keyNode) && _hasOwnProperty.call(_result, keyNode)) { state.line = startLine || state.line; - state.lineStart = startLineStart || state.lineStart; state.position = startPos || state.position; throwError(state, 'duplicated mapping key'); } - - // used for this specific key only because Object.defineProperty is slow - if (keyNode === '__proto__') { - Object.defineProperty(_result, keyNode, { - configurable: true, - enumerable: true, - writable: true, - value: valueNode - }); - } else { - _result[keyNode] = valueNode; - } + _result[keyNode] = valueNode; delete overridableKeys[keyNode]; } @@ -107865,7 +108755,6 @@ function readLineBreak(state) { state.line += 1; state.lineStart = state.position; - state.firstTabInLine = -1; } function skipSeparationSpace(state, allowComments, checkIndent) { @@ -107874,9 +108763,6 @@ function skipSeparationSpace(state, allowComments, checkIndent) { while (ch !== 0) { while (is_WHITE_SPACE(ch)) { - if (ch === 0x09/* Tab */ && state.firstTabInLine === -1) { - state.firstTabInLine = state.position; - } ch = state.input.charCodeAt(++state.position); } @@ -108178,8 +109064,6 @@ function readDoubleQuotedScalar(state, nodeIndent) { function readFlowCollection(state, nodeIndent) { var readNext = true, _line, - _lineStart, - _pos, _tag = state.tag, _result, _anchor = state.anchor, @@ -108188,7 +109072,7 @@ function readFlowCollection(state, nodeIndent) { isPair, isExplicitPair, isMapping, - overridableKeys = Object.create(null), + overridableKeys = {}, keyNode, keyTag, valueNode, @@ -108228,9 +109112,6 @@ function readFlowCollection(state, nodeIndent) { return true; } else if (!readNext) { throwError(state, 'missed comma between flow collection entries'); - } else if (ch === 0x2C/* , */) { - // "flow collection entries can never be completely empty", as per YAML 1.2, section 7.4 - throwError(state, "expected the node content, but found ','"); } keyTag = keyNode = valueNode = null; @@ -108246,9 +109127,7 @@ function readFlowCollection(state, nodeIndent) { } } - _line = state.line; // Save the current line. 
- _lineStart = state.lineStart; - _pos = state.position; + _line = state.line; composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); keyTag = state.tag; keyNode = state.result; @@ -108265,9 +109144,9 @@ function readFlowCollection(state, nodeIndent) { } if (isMapping) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos); + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode); } else if (isPair) { - _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos)); + _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode)); } else { _result.push(keyNode); } @@ -108439,10 +109318,6 @@ function readBlockSequence(state, nodeIndent) { detected = false, ch; - // there is a leading tab before this token, so it can't be a block sequence/mapping; - // it can still be flow sequence/mapping or a scalar - if (state.firstTabInLine !== -1) return false; - if (state.anchor !== null) { state.anchorMap[state.anchor] = _result; } @@ -108450,10 +109325,6 @@ function readBlockSequence(state, nodeIndent) { ch = state.input.charCodeAt(state.position); while (ch !== 0) { - if (state.firstTabInLine !== -1) { - state.position = state.firstTabInLine; - throwError(state, 'tab characters must not be used in indentation'); - } if (ch !== 0x2D/* - */) { break; @@ -108504,13 +109375,11 @@ function readBlockMapping(state, nodeIndent, flowIndent) { var following, allowCompact, _line, - _keyLine, - _keyLineStart, - _keyPos, + _pos, _tag = state.tag, _anchor = state.anchor, _result = {}, - overridableKeys = Object.create(null), + overridableKeys = {}, keyTag = null, keyNode = null, valueNode = null, @@ -108518,10 +109387,6 @@ function readBlockMapping(state, nodeIndent, flowIndent) { detected = false, ch; - // there is a leading tab before this token, so it can't be a block sequence/mapping; - // it can still be flow sequence/mapping or a scalar - if (state.firstTabInLine !== -1) return false; - if (state.anchor !== null) { state.anchorMap[state.anchor] = _result; } @@ -108529,13 +109394,9 @@ function readBlockMapping(state, nodeIndent, flowIndent) { ch = state.input.charCodeAt(state.position); while (ch !== 0) { - if (!atExplicitKey && state.firstTabInLine !== -1) { - state.position = state.firstTabInLine; - throwError(state, 'tab characters must not be used in indentation'); - } - following = state.input.charCodeAt(state.position + 1); _line = state.line; // Save the current line. + _pos = state.position; // // Explicit notation case. There are two separate blocks: @@ -108545,7 +109406,7 @@ function readBlockMapping(state, nodeIndent, flowIndent) { if (ch === 0x3F/* ? */) { if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null); keyTag = keyNode = valueNode = null; } @@ -108568,16 +109429,7 @@ function readBlockMapping(state, nodeIndent, flowIndent) { // // Implicit notation case. Flow-style node as the key first, then ":", and the value. // - } else { - _keyLine = state.line; - _keyLineStart = state.lineStart; - _keyPos = state.position; - - if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { - // Neither implicit nor explicit notation. - // Reading is done. Go to the epilogue. 
- break; - } + } else if (composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { if (state.line === _line) { ch = state.input.charCodeAt(state.position); @@ -108594,7 +109446,7 @@ function readBlockMapping(state, nodeIndent, flowIndent) { } if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null); keyTag = keyNode = valueNode = null; } @@ -108621,18 +109473,15 @@ function readBlockMapping(state, nodeIndent, flowIndent) { state.anchor = _anchor; return true; // Keep the result of `composeNode`. } + + } else { + break; // Reading is done. Go to the epilogue. } // // Common reading code for both explicit and implicit notations. // if (state.line === _line || state.lineIndent > nodeIndent) { - if (atExplicitKey) { - _keyLine = state.line; - _keyLineStart = state.lineStart; - _keyPos = state.position; - } - if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) { if (atExplicitKey) { keyNode = state.result; @@ -108642,7 +109491,7 @@ function readBlockMapping(state, nodeIndent, flowIndent) { } if (!atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos); + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _pos); keyTag = keyNode = valueNode = null; } @@ -108650,7 +109499,7 @@ function readBlockMapping(state, nodeIndent, flowIndent) { ch = state.input.charCodeAt(state.position); } - if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { + if (state.lineIndent > nodeIndent && (ch !== 0)) { throwError(state, 'bad indentation of a mapping entry'); } else if (state.lineIndent < nodeIndent) { break; @@ -108663,7 +109512,7 @@ function readBlockMapping(state, nodeIndent, flowIndent) { // Special case: last mapping's node contains only the key in explicit notation. if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null); } // Expose the resulting mapping. @@ -108752,12 +109601,6 @@ function readTagProperty(state) { throwError(state, 'tag name cannot contain such characters: ' + tagName); } - try { - tagName = decodeURIComponent(tagName); - } catch (err) { - throwError(state, 'tag name is malformed: ' + tagName); - } - if (isVerbatim) { state.tag = tagName; @@ -108843,7 +109686,6 @@ function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact hasContent = false, typeIndex, typeQuantity, - typeList, type, flowIndent, blockIndent; @@ -108945,66 +109787,48 @@ function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact } } - if (state.tag === null) { - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } + if (state.tag !== null && state.tag !== '!') { + if (state.tag === '?') { + // Implicit resolving is not allowed for non-scalar types, and '?' + // non-specific tag is only automatically assigned to plain scalars. + // + // We only need to check kind conformity in case user explicitly assigns '?' + // tag, for example like this: "! [0]" + // + if (state.result !== null && state.kind !== 'scalar') { + throwError(state, 'unacceptable node kind for ! 
tag; it should be "scalar", not "' + state.kind + '"'); + } - } else if (state.tag === '?') { - // Implicit resolving is not allowed for non-scalar types, and '?' - // non-specific tag is only automatically assigned to plain scalars. - // - // We only need to check kind conformity in case user explicitly assigns '?' - // tag, for example like this: "! [0]" - // - if (state.result !== null && state.kind !== 'scalar') { - throwError(state, 'unacceptable node kind for ! tag; it should be "scalar", not "' + state.kind + '"'); - } + for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { + type = state.implicitTypes[typeIndex]; - for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { - type = state.implicitTypes[typeIndex]; + if (type.resolve(state.result)) { // `state.result` updated in resolver if matched + state.result = type.construct(state.result); + state.tag = type.tag; + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + break; + } + } + } else if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) { + type = state.typeMap[state.kind || 'fallback'][state.tag]; + + if (state.result !== null && type.kind !== state.kind) { + throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"'); + } - if (type.resolve(state.result)) { // `state.result` updated in resolver if matched + if (!type.resolve(state.result)) { // `state.result` updated in resolver if matched + throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag'); + } else { state.result = type.construct(state.result); - state.tag = type.tag; if (state.anchor !== null) { state.anchorMap[state.anchor] = state.result; } - break; } - } - } else if (state.tag !== '!') { - if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) { - type = state.typeMap[state.kind || 'fallback'][state.tag]; } else { - // looking for multi type - type = null; - typeList = state.typeMap.multi[state.kind || 'fallback']; - - for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) { - if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) { - type = typeList[typeIndex]; - break; - } - } - } - - if (!type) { throwError(state, 'unknown tag !<' + state.tag + '>'); } - - if (state.result !== null && type.kind !== state.kind) { - throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"'); - } - - if (!type.resolve(state.result, state.tag)) { // `state.result` updated in resolver if matched - throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag'); - } else { - state.result = type.construct(state.result, state.tag); - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } - } } if (state.listener !== null) { @@ -109023,8 +109847,8 @@ function readDocument(state) { state.version = null; state.checkLineBreaks = state.legacy; - state.tagMap = Object.create(null); - state.anchorMap = Object.create(null); + state.tagMap = {}; + state.anchorMap = {}; while ((ch = state.input.charCodeAt(state.position)) !== 0) { skipSeparationSpace(state, true, -1); @@ -109195,13 +110019,114 @@ function load(input, options) { } -module.exports.loadAll = loadAll; -module.exports.load = load; +function safeLoadAll(input, iterator, 
options) { + if (typeof iterator === 'object' && iterator !== null && typeof options === 'undefined') { + options = iterator; + iterator = null; + } + + return loadAll(input, iterator, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options)); +} + + +function safeLoad(input, options) { + return load(input, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options)); +} + + +module.exports.loadAll = loadAll; +module.exports.load = load; +module.exports.safeLoadAll = safeLoadAll; +module.exports.safeLoad = safeLoad; /***/ }), -/***/ 21082: +/***/ 55426: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + + +var common = __nccwpck_require__(59136); + + +function Mark(name, buffer, position, line, column) { + this.name = name; + this.buffer = buffer; + this.position = position; + this.line = line; + this.column = column; +} + + +Mark.prototype.getSnippet = function getSnippet(indent, maxLength) { + var head, start, tail, end, snippet; + + if (!this.buffer) return null; + + indent = indent || 4; + maxLength = maxLength || 75; + + head = ''; + start = this.position; + + while (start > 0 && '\x00\r\n\x85\u2028\u2029'.indexOf(this.buffer.charAt(start - 1)) === -1) { + start -= 1; + if (this.position - start > (maxLength / 2 - 1)) { + head = ' ... '; + start += 5; + break; + } + } + + tail = ''; + end = this.position; + + while (end < this.buffer.length && '\x00\r\n\x85\u2028\u2029'.indexOf(this.buffer.charAt(end)) === -1) { + end += 1; + if (end - this.position > (maxLength / 2 - 1)) { + tail = ' ... '; + end -= 5; + break; + } + } + + snippet = this.buffer.slice(start, end); + + return common.repeat(' ', indent) + head + snippet + tail + '\n' + + common.repeat(' ', indent + this.position - start + head.length) + '^'; +}; + + +Mark.prototype.toString = function toString(compact) { + var snippet, where = ''; + + if (this.name) { + where += 'in "' + this.name + '" '; + } + + where += 'at line ' + (this.line + 1) + ', column ' + (this.column + 1); + + if (!compact) { + snippet = this.getSnippet(); + + if (snippet) { + where += ':\n' + snippet; + } + } + + return where; +}; + + +module.exports = Mark; + + +/***/ }), + +/***/ 66514: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -109209,29 +110134,31 @@ module.exports.load = load; /*eslint-disable max-len*/ -var YAMLException = __nccwpck_require__(68179); -var Type = __nccwpck_require__(6073); +var common = __nccwpck_require__(59136); +var YAMLException = __nccwpck_require__(65199); +var Type = __nccwpck_require__(30967); -function compileList(schema, name) { - var result = []; +function compileList(schema, name, result) { + var exclude = []; - schema[name].forEach(function (currentType) { - var newIndex = result.length; + schema.include.forEach(function (includedSchema) { + result = compileList(includedSchema, name, result); + }); + schema[name].forEach(function (currentType) { result.forEach(function (previousType, previousIndex) { - if (previousType.tag === currentType.tag && - previousType.kind === currentType.kind && - previousType.multi === currentType.multi) { - - newIndex = previousIndex; + if (previousType.tag === currentType.tag && previousType.kind === currentType.kind) { + exclude.push(previousIndex); } }); - result[newIndex] = currentType; + result.push(currentType); }); - return result; + return result.filter(function (type, index) { + return exclude.indexOf(index) === -1; + }); } @@ -109240,22 +110167,11 @@ function compileMap(/* lists... 
*/) { scalar: {}, sequence: {}, mapping: {}, - fallback: {}, - multi: { - scalar: [], - sequence: [], - mapping: [], - fallback: [] - } + fallback: {} }, index, length; function collectType(type) { - if (type.multi) { - result.multi[type.kind].push(type); - result.multi['fallback'].push(type); - } else { - result[type.kind][type.tag] = result['fallback'][type.tag] = type; - } + result[type.kind][type.tag] = result['fallback'][type.tag] = type; } for (index = 0, length = arguments.length; index < length; index += 1) { @@ -109266,62 +110182,58 @@ function compileMap(/* lists... */) { function Schema(definition) { - return this.extend(definition); -} + this.include = definition.include || []; + this.implicit = definition.implicit || []; + this.explicit = definition.explicit || []; + this.implicit.forEach(function (type) { + if (type.loadKind && type.loadKind !== 'scalar') { + throw new YAMLException('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.'); + } + }); -Schema.prototype.extend = function extend(definition) { - var implicit = []; - var explicit = []; + this.compiledImplicit = compileList(this, 'implicit', []); + this.compiledExplicit = compileList(this, 'explicit', []); + this.compiledTypeMap = compileMap(this.compiledImplicit, this.compiledExplicit); +} - if (definition instanceof Type) { - // Schema.extend(type) - explicit.push(definition); - } else if (Array.isArray(definition)) { - // Schema.extend([ type1, type2, ... ]) - explicit = explicit.concat(definition); +Schema.DEFAULT = null; - } else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) { - // Schema.extend({ explicit: [ type1, type2, ... ], implicit: [ type1, type2, ... ] }) - if (definition.implicit) implicit = implicit.concat(definition.implicit); - if (definition.explicit) explicit = explicit.concat(definition.explicit); - } else { - throw new YAMLException('Schema.extend argument should be a Type, [ Type ], ' + - 'or a schema definition ({ implicit: [...], explicit: [...] })'); - } - - implicit.forEach(function (type) { - if (!(type instanceof Type)) { - throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); - } +Schema.create = function createSchema() { + var schemas, types; - if (type.loadKind && type.loadKind !== 'scalar') { - throw new YAMLException('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.'); - } + switch (arguments.length) { + case 1: + schemas = Schema.DEFAULT; + types = arguments[0]; + break; - if (type.multi) { - throw new YAMLException('There is a multi type in the implicit list of a schema. 
Multi tags can only be listed as explicit.'); - } - }); + case 2: + schemas = arguments[0]; + types = arguments[1]; + break; - explicit.forEach(function (type) { - if (!(type instanceof Type)) { - throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); - } - }); + default: + throw new YAMLException('Wrong number of arguments for Schema.create function'); + } - var result = Object.create(Schema.prototype); + schemas = common.toArray(schemas); + types = common.toArray(types); - result.implicit = (this.implicit || []).concat(implicit); - result.explicit = (this.explicit || []).concat(explicit); + if (!schemas.every(function (schema) { return schema instanceof Schema; })) { + throw new YAMLException('Specified list of super schemas (or a single Schema object) contains a non-Schema object.'); + } - result.compiledImplicit = compileList(result, 'implicit'); - result.compiledExplicit = compileList(result, 'explicit'); - result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit); + if (!types.every(function (type) { return type instanceof Type; })) { + throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); + } - return result; + return new Schema({ + include: schemas, + explicit: types + }); }; @@ -109330,7 +110242,7 @@ module.exports = Schema; /***/ }), -/***/ 12011: +/***/ 92183: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -109344,219 +110256,160 @@ module.exports = Schema; -module.exports = __nccwpck_require__(1035); +var Schema = __nccwpck_require__(66514); + + +module.exports = new Schema({ + include: [ + __nccwpck_require__(1571) + ] +}); /***/ }), -/***/ 18759: +/***/ 56874: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// JS-YAML's default schema for `safeLoad` function. +// JS-YAML's default schema for `load` function. // It is not described in the YAML specification. // -// This schema is based on standard YAML's Core schema and includes most of -// extra types described at YAML tag repository. (http://yaml.org/type/) +// This schema is based on JS-YAML's default safe schema and includes +// JavaScript-specific types: !!js/undefined, !!js/regexp and !!js/function. +// +// Also this schema is used as default base schema at `Schema.create` function. -module.exports = __nccwpck_require__(12011).extend({ - implicit: [ - __nccwpck_require__(99212), - __nccwpck_require__(86104) +var Schema = __nccwpck_require__(66514); + + +module.exports = Schema.DEFAULT = new Schema({ + include: [ + __nccwpck_require__(48949) ], explicit: [ - __nccwpck_require__(77900), - __nccwpck_require__(19046), - __nccwpck_require__(96860), - __nccwpck_require__(79548) + __nccwpck_require__(25914), + __nccwpck_require__(69242), + __nccwpck_require__(27278) ] }); /***/ }), -/***/ 28562: +/***/ 48949: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Standard YAML's Failsafe schema. -// http://www.yaml.org/spec/1.2/spec.html#id2802346 +// JS-YAML's default schema for `safeLoad` function. +// It is not described in the YAML specification. +// +// This schema is based on standard YAML's Core schema and includes most of +// extra types described at YAML tag repository. 
(http://yaml.org/type/) -var Schema = __nccwpck_require__(21082); +var Schema = __nccwpck_require__(66514); module.exports = new Schema({ + include: [ + __nccwpck_require__(92183) + ], + implicit: [ + __nccwpck_require__(83714), + __nccwpck_require__(81393) + ], explicit: [ - __nccwpck_require__(23619), - __nccwpck_require__(67283), - __nccwpck_require__(86150) + __nccwpck_require__(32551), + __nccwpck_require__(96668), + __nccwpck_require__(76039), + __nccwpck_require__(69237) ] }); /***/ }), -/***/ 1035: +/***/ 66037: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Standard YAML's JSON schema. -// http://www.yaml.org/spec/1.2/spec.html#id2803231 -// -// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. -// So, this schema is not such strict as defined in the YAML specification. -// It allows numbers in binary notaion, use `Null` and `NULL` as `null`, etc. +// Standard YAML's Failsafe schema. +// http://www.yaml.org/spec/1.2/spec.html#id2802346 -module.exports = __nccwpck_require__(28562).extend({ - implicit: [ - __nccwpck_require__(20721), - __nccwpck_require__(64993), - __nccwpck_require__(11615), - __nccwpck_require__(42705) +var Schema = __nccwpck_require__(66514); + + +module.exports = new Schema({ + explicit: [ + __nccwpck_require__(52672), + __nccwpck_require__(5490), + __nccwpck_require__(31173) ] }); /***/ }), -/***/ 96975: +/***/ 1571: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +// Standard YAML's JSON schema. +// http://www.yaml.org/spec/1.2/spec.html#id2803231 +// +// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. +// So, this schema is not such strict as defined in the YAML specification. +// It allows numbers in binary notaion, use `Null` and `NULL` as `null`, etc. -var common = __nccwpck_require__(26829); - - -// get snippet for a single line, respecting maxLength -function getLine(buffer, lineStart, lineEnd, position, maxLineLength) { - var head = ''; - var tail = ''; - var maxHalfLength = Math.floor(maxLineLength / 2) - 1; - - if (position - lineStart > maxHalfLength) { - head = ' ... 
'; - lineStart = position - maxHalfLength + head.length; - } - - if (lineEnd - position > maxHalfLength) { - tail = ' ...'; - lineEnd = position + maxHalfLength - tail.length; - } - - return { - str: head + buffer.slice(lineStart, lineEnd).replace(/\t/g, '→') + tail, - pos: position - lineStart + head.length // relative position - }; -} - - -function padStart(string, max) { - return common.repeat(' ', max - string.length) + string; -} - - -function makeSnippet(mark, options) { - options = Object.create(options || null); - - if (!mark.buffer) return null; - - if (!options.maxLength) options.maxLength = 79; - if (typeof options.indent !== 'number') options.indent = 1; - if (typeof options.linesBefore !== 'number') options.linesBefore = 3; - if (typeof options.linesAfter !== 'number') options.linesAfter = 2; - - var re = /\r?\n|\r|\0/g; - var lineStarts = [ 0 ]; - var lineEnds = []; - var match; - var foundLineNo = -1; - - while ((match = re.exec(mark.buffer))) { - lineEnds.push(match.index); - lineStarts.push(match.index + match[0].length); - if (mark.position <= match.index && foundLineNo < 0) { - foundLineNo = lineStarts.length - 2; - } - } - if (foundLineNo < 0) foundLineNo = lineStarts.length - 1; +var Schema = __nccwpck_require__(66514); - var result = '', i, line; - var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length; - var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3); - for (i = 1; i <= options.linesBefore; i++) { - if (foundLineNo - i < 0) break; - line = getLine( - mark.buffer, - lineStarts[foundLineNo - i], - lineEnds[foundLineNo - i], - mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]), - maxLineLength - ); - result = common.repeat(' ', options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) + - ' | ' + line.str + '\n' + result; - } - - line = getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength); - result += common.repeat(' ', options.indent) + padStart((mark.line + 1).toString(), lineNoLength) + - ' | ' + line.str + '\n'; - result += common.repeat('-', options.indent + lineNoLength + 3 + line.pos) + '^' + '\n'; - - for (i = 1; i <= options.linesAfter; i++) { - if (foundLineNo + i >= lineEnds.length) break; - line = getLine( - mark.buffer, - lineStarts[foundLineNo + i], - lineEnds[foundLineNo + i], - mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]), - maxLineLength - ); - result += common.repeat(' ', options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) + - ' | ' + line.str + '\n'; - } - - return result.replace(/\n$/, ''); -} - - -module.exports = makeSnippet; +module.exports = new Schema({ + include: [ + __nccwpck_require__(66037) + ], + implicit: [ + __nccwpck_require__(22671), + __nccwpck_require__(94675), + __nccwpck_require__(89963), + __nccwpck_require__(15564) + ] +}); /***/ }), -/***/ 6073: +/***/ 30967: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var YAMLException = __nccwpck_require__(68179); +var YAMLException = __nccwpck_require__(65199); var TYPE_CONSTRUCTOR_OPTIONS = [ 'kind', - 'multi', 'resolve', 'construct', 'instanceOf', 'predicate', 'represent', - 'representName', 'defaultStyle', 'styleAliases' ]; @@ -109591,18 +110444,15 @@ function Type(tag, options) { }); // TODO: Add tag format check. 
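This hunk rewrites the Type constructor's option handling; together with the Schema plumbing earlier in the diff, it is the extension point js-yaml exposes for custom tags. A minimal sketch, assuming the published js-yaml API (the '!uppercase' tag and its resolver are invented for illustration):

// Illustrative only: a custom scalar tag registered against the bundled schema machinery.
const yaml = require('js-yaml');

// '!uppercase some text' loads as 'SOME TEXT'.
const UppercaseType = new yaml.Type('!uppercase', {
  kind: 'scalar', // 'scalar', 'sequence', or 'mapping'
  resolve: function (data) { return typeof data === 'string'; },
  construct: function (data) { return data.toUpperCase(); }
});

// js-yaml 3.x (the "+" side of this diff) composes schemas with Schema.create;
// js-yaml 4.x (the "-" side) replaces that with DEFAULT_SCHEMA.extend([...]).
const SCHEMA = yaml.Schema.create([UppercaseType]);

console.log(yaml.load('greeting: !uppercase hello', { schema: SCHEMA }));
// { greeting: 'HELLO' }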
- this.options = options; // keep original options in case user wants to extend this type later - this.tag = tag; - this.kind = options['kind'] || null; - this.resolve = options['resolve'] || function () { return true; }; - this.construct = options['construct'] || function (data) { return data; }; - this.instanceOf = options['instanceOf'] || null; - this.predicate = options['predicate'] || null; - this.represent = options['represent'] || null; - this.representName = options['representName'] || null; - this.defaultStyle = options['defaultStyle'] || null; - this.multi = options['multi'] || false; - this.styleAliases = compileStyleAliases(options['styleAliases'] || null); + this.tag = tag; + this.kind = options['kind'] || null; + this.resolve = options['resolve'] || function () { return true; }; + this.construct = options['construct'] || function (data) { return data; }; + this.instanceOf = options['instanceOf'] || null; + this.predicate = options['predicate'] || null; + this.represent = options['represent'] || null; + this.defaultStyle = options['defaultStyle'] || null; + this.styleAliases = compileStyleAliases(options['styleAliases'] || null); if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); @@ -109614,7 +110464,7 @@ module.exports = Type; /***/ }), -/***/ 77900: +/***/ 32551: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -109622,8 +110472,15 @@ module.exports = Type; /*eslint-disable no-bitwise*/ +var NodeBuffer; + +try { + // A trick for browserified version, to not include `Buffer` shim + var _require = require; + NodeBuffer = _require('buffer').Buffer; +} catch (__) {} -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); // [ 64, 65, 66 ] -> [ padding, CR, LF ] @@ -109687,7 +110544,13 @@ function constructYamlBinary(data) { result.push((bits >> 4) & 0xFF); } - return new Uint8Array(result); + // Wrap into Buffer for NodeJS and leave Array for browser + if (NodeBuffer) { + // Support node 6.+ Buffer API when available + return NodeBuffer.from ? NodeBuffer.from(result) : new NodeBuffer(result); + } + + return result; } function representYamlBinary(object /*, style*/) { @@ -109732,8 +110595,8 @@ function representYamlBinary(object /*, style*/) { return result; } -function isBinary(obj) { - return Object.prototype.toString.call(obj) === '[object Uint8Array]'; +function isBinary(object) { + return NodeBuffer && NodeBuffer.isBuffer(object); } module.exports = new Type('tag:yaml.org,2002:binary', { @@ -109747,13 +110610,13 @@ module.exports = new Type('tag:yaml.org,2002:binary', { /***/ }), -/***/ 64993: +/***/ 94675: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); function resolveYamlBoolean(data) { if (data === null) return false; @@ -109790,21 +110653,23 @@ module.exports = new Type('tag:yaml.org,2002:bool', { /***/ }), -/***/ 42705: +/***/ 15564: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var common = __nccwpck_require__(26829); -var Type = __nccwpck_require__(6073); +var common = __nccwpck_require__(59136); +var Type = __nccwpck_require__(30967); var YAML_FLOAT_PATTERN = new RegExp( // 2.5e4, 2.5 and integers - '^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' + + '^(?:[-+]?(?:0|[1-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' 
+ // .2e4, .2 // special case, seems not from spec '|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' + + // 20:59 + '|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*' + // .inf '|[-+]?\\.(?:inf|Inf|INF)' + // .nan @@ -109824,10 +110689,11 @@ function resolveYamlFloat(data) { } function constructYamlFloat(data) { - var value, sign; + var value, sign, base, digits; value = data.replace(/_/g, '').toLowerCase(); sign = value[0] === '-' ? -1 : 1; + digits = []; if ('+-'.indexOf(value[0]) >= 0) { value = value.slice(1); @@ -109838,6 +110704,22 @@ function constructYamlFloat(data) { } else if (value === '.nan') { return NaN; + + } else if (value.indexOf(':') >= 0) { + value.split(':').forEach(function (v) { + digits.unshift(parseFloat(v, 10)); + }); + + value = 0.0; + base = 1; + + digits.forEach(function (d) { + value += d * base; + base *= 60; + }); + + return sign * value; + } return sign * parseFloat(value, 10); } @@ -109895,14 +110777,14 @@ module.exports = new Type('tag:yaml.org,2002:float', { /***/ }), -/***/ 11615: +/***/ 89963: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var common = __nccwpck_require__(26829); -var Type = __nccwpck_require__(6073); +var common = __nccwpck_require__(59136); +var Type = __nccwpck_require__(30967); function isHexCode(c) { return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) || @@ -109969,22 +110851,17 @@ function resolveYamlInteger(data) { return hasDigits && ch !== '_'; } - - if (ch === 'o') { - // base 8 - index++; - - for (; index < max; index++) { - ch = data[index]; - if (ch === '_') continue; - if (!isOctCode(data.charCodeAt(index))) return false; - hasDigits = true; - } - return hasDigits && ch !== '_'; + // base 8 + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (!isOctCode(data.charCodeAt(index))) return false; + hasDigits = true; } + return hasDigits && ch !== '_'; } - // base 10 (except 0) + // base 10 (except 0) or base 60 // value should not start with `_`; if (ch === '_') return false; @@ -109992,6 +110869,7 @@ function resolveYamlInteger(data) { for (; index < max; index++) { ch = data[index]; if (ch === '_') continue; + if (ch === ':') break; if (!isDecCode(data.charCodeAt(index))) { return false; } @@ -110001,11 +110879,15 @@ function resolveYamlInteger(data) { // Should have digits and should not end with `_` if (!hasDigits || ch === '_') return false; - return true; + // if !base60 - done; + if (ch !== ':') return true; + + // base60 almost not used, no needs to optimize + return /^(:[0-5]?[0-9])+$/.test(data.slice(index)); } function constructYamlInteger(data) { - var value = data, sign = 1, ch; + var value = data, sign = 1, ch, base, digits = []; if (value.indexOf('_') !== -1) { value = value.replace(/_/g, ''); @@ -110023,8 +110905,25 @@ function constructYamlInteger(data) { if (ch === '0') { if (value[1] === 'b') return sign * parseInt(value.slice(2), 2); - if (value[1] === 'x') return sign * parseInt(value.slice(2), 16); - if (value[1] === 'o') return sign * parseInt(value.slice(2), 8); + if (value[1] === 'x') return sign * parseInt(value, 16); + return sign * parseInt(value, 8); + } + + if (value.indexOf(':') !== -1) { + value.split(':').forEach(function (v) { + digits.unshift(parseInt(v, 10)); + }); + + value = 0; + base = 1; + + digits.forEach(function (d) { + value += (d * base); + base *= 60; + }); + + return sign * value; + } return sign * parseInt(value, 10); @@ -110042,7 +110941,7 @@ module.exports = new Type('tag:yaml.org,2002:int', { predicate: isInteger, represent: { 
binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); }, - octal: function (obj) { return obj >= 0 ? '0o' + obj.toString(8) : '-0o' + obj.toString(8).slice(1); }, + octal: function (obj) { return obj >= 0 ? '0' + obj.toString(8) : '-0' + obj.toString(8).slice(1); }, decimal: function (obj) { return obj.toString(10); }, /* eslint-disable max-len */ hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); } @@ -110059,13 +110958,218 @@ module.exports = new Type('tag:yaml.org,2002:int', { /***/ }), -/***/ 86150: +/***/ 27278: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var esprima; + +// Browserified version does not have esprima +// +// 1. For node.js just require module as deps +// 2. For browser try to require mudule via external AMD system. +// If not found - try to fallback to window.esprima. If not +// found too - then fail to parse. +// +try { + // workaround to exclude package from browserify list. + var _require = require; + esprima = _require('esprima'); +} catch (_) { + /* eslint-disable no-redeclare */ + /* global window */ + if (typeof window !== 'undefined') esprima = window.esprima; +} + +var Type = __nccwpck_require__(30967); + +function resolveJavascriptFunction(data) { + if (data === null) return false; + + try { + var source = '(' + data + ')', + ast = esprima.parse(source, { range: true }); + + if (ast.type !== 'Program' || + ast.body.length !== 1 || + ast.body[0].type !== 'ExpressionStatement' || + (ast.body[0].expression.type !== 'ArrowFunctionExpression' && + ast.body[0].expression.type !== 'FunctionExpression')) { + return false; + } + + return true; + } catch (err) { + return false; + } +} + +function constructJavascriptFunction(data) { + /*jslint evil:true*/ + + var source = '(' + data + ')', + ast = esprima.parse(source, { range: true }), + params = [], + body; + + if (ast.type !== 'Program' || + ast.body.length !== 1 || + ast.body[0].type !== 'ExpressionStatement' || + (ast.body[0].expression.type !== 'ArrowFunctionExpression' && + ast.body[0].expression.type !== 'FunctionExpression')) { + throw new Error('Failed to resolve function'); + } + + ast.body[0].expression.params.forEach(function (param) { + params.push(param.name); + }); + + body = ast.body[0].expression.body.range; + + // Esprima's ranges include the first '{' and the last '}' characters on + // function expressions. So cut them out. + if (ast.body[0].expression.body.type === 'BlockStatement') { + /*eslint-disable no-new-func*/ + return new Function(params, source.slice(body[0] + 1, body[1] - 1)); + } + // ES6 arrow functions can omit the BlockStatement. In that case, just return + // the body. 
+ /*eslint-disable no-new-func*/ + return new Function(params, 'return ' + source.slice(body[0], body[1])); +} + +function representJavascriptFunction(object /*, style*/) { + return object.toString(); +} + +function isFunction(object) { + return Object.prototype.toString.call(object) === '[object Function]'; +} + +module.exports = new Type('tag:yaml.org,2002:js/function', { + kind: 'scalar', + resolve: resolveJavascriptFunction, + construct: constructJavascriptFunction, + predicate: isFunction, + represent: representJavascriptFunction +}); + + +/***/ }), + +/***/ 69242: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(30967); + +function resolveJavascriptRegExp(data) { + if (data === null) return false; + if (data.length === 0) return false; + + var regexp = data, + tail = /\/([gim]*)$/.exec(data), + modifiers = ''; + + // if regexp starts with '/' it can have modifiers and must be properly closed + // `/foo/gim` - modifiers tail can be maximum 3 chars + if (regexp[0] === '/') { + if (tail) modifiers = tail[1]; + + if (modifiers.length > 3) return false; + // if expression starts with /, is should be properly terminated + if (regexp[regexp.length - modifiers.length - 1] !== '/') return false; + } + + return true; +} + +function constructJavascriptRegExp(data) { + var regexp = data, + tail = /\/([gim]*)$/.exec(data), + modifiers = ''; + + // `/foo/gim` - tail can be maximum 4 chars + if (regexp[0] === '/') { + if (tail) modifiers = tail[1]; + regexp = regexp.slice(1, regexp.length - modifiers.length - 1); + } + + return new RegExp(regexp, modifiers); +} + +function representJavascriptRegExp(object /*, style*/) { + var result = '/' + object.source + '/'; + + if (object.global) result += 'g'; + if (object.multiline) result += 'm'; + if (object.ignoreCase) result += 'i'; + + return result; +} + +function isRegExp(object) { + return Object.prototype.toString.call(object) === '[object RegExp]'; +} + +module.exports = new Type('tag:yaml.org,2002:js/regexp', { + kind: 'scalar', + resolve: resolveJavascriptRegExp, + construct: constructJavascriptRegExp, + predicate: isRegExp, + represent: representJavascriptRegExp +}); + + +/***/ }), + +/***/ 25914: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); + +function resolveJavascriptUndefined() { + return true; +} + +function constructJavascriptUndefined() { + /*eslint-disable no-undefined*/ + return undefined; +} + +function representJavascriptUndefined() { + return ''; +} + +function isUndefined(object) { + return typeof object === 'undefined'; +} + +module.exports = new Type('tag:yaml.org,2002:js/undefined', { + kind: 'scalar', + resolve: resolveJavascriptUndefined, + construct: constructJavascriptUndefined, + predicate: isUndefined, + represent: representJavascriptUndefined +}); + + +/***/ }), + +/***/ 31173: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(30967); module.exports = new Type('tag:yaml.org,2002:map', { kind: 'mapping', @@ -110075,13 +111179,13 @@ module.exports = new Type('tag:yaml.org,2002:map', { /***/ }), -/***/ 86104: +/***/ 81393: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); function resolveYamlMerge(data) { return data === '<<' || data === null; @@ -110095,13 
+111199,13 @@ module.exports = new Type('tag:yaml.org,2002:merge', { /***/ }), -/***/ 20721: +/***/ 22671: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); function resolveYamlNull(data) { if (data === null) return true; @@ -110129,8 +111233,7 @@ module.exports = new Type('tag:yaml.org,2002:null', { canonical: function () { return '~'; }, lowercase: function () { return 'null'; }, uppercase: function () { return 'NULL'; }, - camelcase: function () { return 'Null'; }, - empty: function () { return ''; } + camelcase: function () { return 'Null'; } }, defaultStyle: 'lowercase' }); @@ -110138,13 +111241,13 @@ module.exports = new Type('tag:yaml.org,2002:null', { /***/ }), -/***/ 19046: +/***/ 96668: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); var _hasOwnProperty = Object.prototype.hasOwnProperty; var _toString = Object.prototype.toString; @@ -110190,13 +111293,13 @@ module.exports = new Type('tag:yaml.org,2002:omap', { /***/ }), -/***/ 96860: +/***/ 76039: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); var _toString = Object.prototype.toString; @@ -110251,13 +111354,13 @@ module.exports = new Type('tag:yaml.org,2002:pairs', { /***/ }), -/***/ 67283: +/***/ 5490: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); module.exports = new Type('tag:yaml.org,2002:seq', { kind: 'sequence', @@ -110267,13 +111370,13 @@ module.exports = new Type('tag:yaml.org,2002:seq', { /***/ }), -/***/ 79548: +/***/ 69237: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); var _hasOwnProperty = Object.prototype.hasOwnProperty; @@ -110304,13 +111407,13 @@ module.exports = new Type('tag:yaml.org,2002:set', { /***/ }), -/***/ 23619: +/***/ 52672: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); module.exports = new Type('tag:yaml.org,2002:str', { kind: 'scalar', @@ -110320,13 +111423,13 @@ module.exports = new Type('tag:yaml.org,2002:str', { /***/ }), -/***/ 99212: +/***/ 83714: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var Type = __nccwpck_require__(6073); +var Type = __nccwpck_require__(30967); var YAML_DATE_REGEXP = new RegExp( '^([0-9][0-9][0-9][0-9])' + // [1] year @@ -117577,15 +118680,15 @@ Object.defineProperty(module.exports, "pool", ({ /***/ ((module) => { "use strict"; - - -module.exports = { - READ_UNCOMMITTED: 0x01, - READ_COMMITTED: 0x02, - REPEATABLE_READ: 0x03, - SERIALIZABLE: 0x04, - SNAPSHOT: 0x05 -} + + +module.exports = { + READ_UNCOMMITTED: 0x01, + READ_COMMITTED: 0x02, + REPEATABLE_READ: 0x03, + SERIALIZABLE: 0x04, + SNAPSHOT: 0x05 +} /***/ }), @@ -121326,7 +122429,7 @@ PoolSelector.ORDER = function PoolSelectorOrder() { /***/ 37806: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var Buffer = __nccwpck_require__(1206).Buffer; +var Buffer = __nccwpck_require__(21867).Buffer; var Crypto = __nccwpck_require__(76417); var Auth = exports; @@ -121552,7 +122655,7 @@ var 
BUFFER_ALLOC_SIZE = Math.pow(2, 8); // Don't panic: Good enough to represent byte values up to 8192 TB var IEEE_754_BINARY_64_PRECISION = Math.pow(2, 53); var MAX_PACKET_LENGTH = Math.pow(2, 24) - 1; -var Buffer = __nccwpck_require__(1206).Buffer; +var Buffer = __nccwpck_require__(21867).Buffer; module.exports = PacketWriter; function PacketWriter() { @@ -121765,7 +122868,7 @@ PacketWriter.prototype._allocate = function _allocate(bytes) { var PacketHeader = __nccwpck_require__(32114); var BigNumber = __nccwpck_require__(87558); -var Buffer = __nccwpck_require__(1206).Buffer; +var Buffer = __nccwpck_require__(21867).Buffer; var BufferList = __nccwpck_require__(87768); var MAX_PACKET_LENGTH = Math.pow(2, 24) - 1; @@ -127238,7 +128341,7 @@ AuthSwitchResponsePacket.prototype.write = function write(writer) { /***/ 72948: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var Buffer = __nccwpck_require__(1206).Buffer; +var Buffer = __nccwpck_require__(21867).Buffer; module.exports = ClientAuthenticationPacket; function ClientAuthenticationPacket(options) { @@ -127634,7 +128737,7 @@ FieldPacket.prototype.write = function(writer) { /***/ 38230: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var Buffer = __nccwpck_require__(1206).Buffer; +var Buffer = __nccwpck_require__(21867).Buffer; var Client = __nccwpck_require__(5427); module.exports = HandshakeInitializationPacket; @@ -128375,7 +129478,7 @@ var Packets = __nccwpck_require__(87628); var ResultSet = __nccwpck_require__(18774); var Sequence = __nccwpck_require__(80401); var ServerStatus = __nccwpck_require__(64563); -var Readable = __nccwpck_require__(61017); +var Readable = __nccwpck_require__(51642); var Util = __nccwpck_require__(31669); module.exports = Query; @@ -128831,24459 +129934,25798 @@ exports.Statistics = __nccwpck_require__(21827); /***/ }), -/***/ 26637: +/***/ 34266: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. 
- - - -/**/ - -var pna = __nccwpck_require__(47810); -/**/ - -/**/ -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ - -module.exports = Duplex; - -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ - -var Readable = __nccwpck_require__(78680); -var Writable = __nccwpck_require__(81008); +const Duplex = __nccwpck_require__(92413).Duplex; -util.inherits(Duplex, Readable); +const kCallback = Symbol('Callback'); +const kOtherSide = Symbol('Other'); -{ - // avoid scope creep, the keys array can then be collected - var keys = objectKeys(Writable.prototype); - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; +class DuplexSocket extends Duplex { + constructor(options) { + super(options); + this[kCallback] = null; + this[kOtherSide] = null; } -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - - Readable.call(this, options); - Writable.call(this, options); - - if (options && options.readable === false) this.readable = false; - if (options && options.writable === false) this.writable = false; - - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - - this.once('end', onend); -} - -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; + _read() { + const callback = this[kCallback]; + if (callback) { + this[kCallback] = null; + callback(); + } } -}); -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; - - // no more data can be written. - // But allow more writes to happen in this tick. 
- pna.nextTick(onEndNT, this); -} + _write(chunk, encoding, callback) { + this[kOtherSide][kCallback] = callback; + this[kOtherSide].push(chunk); + } -function onEndNT(self) { - self.end(); + _final(callback) { + this[kOtherSide].on('end', callback); + this[kOtherSide].push(null); + } } -Object.defineProperty(Duplex.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined || this._writableState === undefined) { - return false; - } - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; - } - - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - this._writableState.destroyed = value; +class DuplexPair { + constructor(options) { + this.socket1 = new DuplexSocket(options); + this.socket2 = new DuplexSocket(options); + this.socket1[kOtherSide] = this.socket2; + this.socket2[kOtherSide] = this.socket1; } -}); +} -Duplex.prototype._destroy = function (err, cb) { - this.push(null); - this.end(); +module.exports = DuplexPair; - pna.nextTick(cb, err); -}; /***/ }), -/***/ 6671: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 83533: +/***/ ((module, exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. +/** + * Module exports. + */ -module.exports = PassThrough; +module.exports = exports; -var Transform = __nccwpck_require__(6500); +/** + * Module dependencies. 
+ */ -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ +var fs = __nccwpck_require__(35747); +var path = __nccwpck_require__(85622); +var nopt = __nccwpck_require__(41743); +var log = __nccwpck_require__(64314); +log.disableProgress(); +var napi = __nccwpck_require__(10480); -util.inherits(PassThrough, Transform); +var EE = __nccwpck_require__(28614).EventEmitter; +var inherits = __nccwpck_require__(31669).inherits; +var commands = [ + 'clean', + 'install', + 'reinstall', + 'build', + 'rebuild', + 'package', + 'testpackage', + 'publish', + 'unpublish', + 'info', + 'testbinary', + 'reveal', + 'configure' + ]; +var aliases = {}; -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); +// differentiate node-pre-gyp's logs from npm's +log.heading = 'node-pre-gyp'; - Transform.call(this, options); -} +exports.find = __nccwpck_require__(92800).find; -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; +function Run() { + var self = this; -/***/ }), + this.commands = {}; -/***/ 78680: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + commands.forEach(function (command) { + self.commands[command] = function (argv, callback) { + log.verbose('command', command, argv); + return require('./' + command)(self, argv, callback); + }; + }); +} +inherits(Run, EE); +exports.Run = Run; +var proto = Run.prototype; -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. +/** + * Export the contents of the package.json. + */ +proto.package = __nccwpck_require__(16398); +/** + * nopt configuration definitions + */ -/**/ +proto.configDefs = { + help: Boolean, // everywhere + arch: String, // 'configure' + debug: Boolean, // 'build' + directory: String, // bin + proxy: String, // 'install' + loglevel: String, // everywhere +}; -var pna = __nccwpck_require__(47810); -/**/ +/** + * nopt shorthands + */ -module.exports = Readable; +proto.shorthands = { + release: '--no-debug', + C: '--directory', + debug: '--debug', + j: '--jobs', + silent: '--loglevel=silent', + silly: '--loglevel=silly', + verbose: '--loglevel=verbose', +}; -/**/ -var isArray = __nccwpck_require__(20893); -/**/ +/** + * expose the command aliases for the bin file to use. 
+ */ -/**/ -var Duplex; -/**/ +proto.aliases = aliases; -Readable.ReadableState = ReadableState; +/** + * Parses the given argv array and sets the 'opts', + * 'argv' and 'command' properties. + */ -/**/ -var EE = __nccwpck_require__(28614).EventEmitter; +proto.parseArgv = function parseOpts (argv) { + this.opts = nopt(this.configDefs, this.shorthands, argv); + this.argv = this.opts.argv.remain.slice(); + var commands = this.todo = []; -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ + // create a copy of the argv array with aliases mapped + argv = this.argv.map(function (arg) { + // is this an alias? + if (arg in this.aliases) { + arg = this.aliases[arg]; + } + return arg; + }, this); -/**/ -var Stream = __nccwpck_require__(78743); -/**/ + // process the mapped args into "command" objects ("name" and "args" props) + argv.slice().forEach(function (arg) { + if (arg in this.commands) { + var args = argv.splice(0, argv.indexOf(arg)); + argv.shift(); + if (commands.length > 0) { + commands[commands.length - 1].args = args; + } + commands.push({ name: arg, args: [] }); + } + }, this); + if (commands.length > 0) { + commands[commands.length - 1].args = argv.splice(0); + } -/**/ + // expand commands entries for multiple napi builds + var dir = this.opts.directory; + if (dir == null) dir = process.cwd(); + var package_json = JSON.parse(fs.readFileSync(path.join(dir,'package.json'))); -var Buffer = __nccwpck_require__(1206).Buffer; -var OurUint8Array = global.Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} + this.todo = napi.expand_commands (package_json, this.opts, commands); -/**/ + // support for inheriting config env variables from npm + var npm_config_prefix = 'npm_config_'; + Object.keys(process.env).forEach(function (name) { + if (name.indexOf(npm_config_prefix) !== 0) return; + var val = process.env[name]; + if (name === npm_config_prefix + 'loglevel') { + log.level = val; + } else { + // add the user-defined options to the config + name = name.substring(npm_config_prefix.length); + // avoid npm argv clobber already present args + // which avoids problem of 'npm test' calling + // script that runs unique npm install commands + if (name === 'argv') { + if (this.opts.argv && + this.opts.argv.remain && + this.opts.argv.remain.length) { + // do nothing + } else { + this.opts[name] = val; + } + } else { + this.opts[name] = val; + } + } + }, this); -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ + if (this.opts.loglevel) { + log.level = this.opts.loglevel; + } + log.resume(); +}; -/**/ -var debugUtil = __nccwpck_require__(31669); -var debug = void 0; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/**/ +/** + * Returns the usage instructions for node-pre-gyp. 
+ */ -var BufferList = __nccwpck_require__(42480); -var destroyImpl = __nccwpck_require__(13166); -var StringDecoder; +proto.usage = function usage () { + var str = [ + '', + ' Usage: node-pre-gyp [options]', + '', + ' where is one of:', + commands.map(function (c) { + return ' - ' + c + ' - ' + require('./' + c).usage; + }).join('\n'), + '', + 'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'), + 'node@' + process.versions.node + ].join('\n'); + return str; +}; -util.inherits(Readable, Stream); +/** + * Version number getter. + */ -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +Object.defineProperty(proto, 'version', { + get: function () { + return this.package.version; + }, + enumerable: true +}); -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; -} -function ReadableState(options, stream) { - Duplex = Duplex || __nccwpck_require__(26637); +/***/ }), - options = options || {}; +/***/ 92800: +/***/ ((module, exports, __nccwpck_require__) => { - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; +"use strict"; - // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; +var versioning = __nccwpck_require__(10887); +var napi = __nccwpck_require__(10480); +var existsSync = __nccwpck_require__(35747).existsSync || __nccwpck_require__(85622).existsSync; +var path = __nccwpck_require__(85622); - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var readableHwm = options.readableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; +module.exports = exports; - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; +exports.usage = 'Finds the require path for the node-pre-gyp installed module'; - // cast to ints. 
- this.highWaterMark = Math.floor(this.highWaterMark); +exports.validate = function(package_json,opts) { + versioning.validate_config(package_json,opts); +}; - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; +exports.find = function(package_json_path,opts) { + if (!existsSync(package_json_path)) { + throw new Error("package.json does not exist at " + package_json_path); + } + var package_json = require(package_json_path); + versioning.validate_config(package_json,opts); + var napi_build_version; + if (napi.get_napi_build_versions (package_json, opts)) { + napi_build_version = napi.get_best_napi_build_version(package_json, opts); + } + opts = opts || {}; + if (!opts.module_root) opts.module_root = path.dirname(package_json_path); + var meta = versioning.evaluate(package_json,opts,napi_build_version); + return meta.module; +}; - // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - this.sync = true; - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; +/***/ }), - // has it been destroyed - this.destroyed = false; +/***/ 10480: +/***/ ((module, exports, __nccwpck_require__) => { - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; +"use strict"; - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; - // if true, a maybeReadMore has been scheduled - this.readingMore = false; +var fs = __nccwpck_require__(35747); +var rm = __nccwpck_require__(99120); +var log = __nccwpck_require__(64314); - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = __nccwpck_require__(7545)/* .StringDecoder */ .s; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} +module.exports = exports; -function Readable(options) { - Duplex = Duplex || __nccwpck_require__(26637); +var versionArray = process.version + .substr(1) + .replace(/-.*$/, '') + .split('.') + .map(function(item) { + return +item; + }); - if (!(this instanceof Readable)) return new Readable(options); +var napi_multiple_commands = [ + 'build', + 'clean', + 'configure', + 'package', + 'publish', + 'reveal', + 'testbinary', + 'testpackage', + 'unpublish' +]; - this._readableState = new ReadableState(options, this); +var napi_build_version_tag = 'napi_build_version='; - // legacy - this.readable = true; +module.exports.get_napi_version = function(target) { // target may be undefined + // returns the non-zero numeric napi version or undefined if napi is not supported. 
+ // correctly supporting target requires an updated cross-walk + var version = process.versions.napi; // can be undefined + if (!version) { // this code should never need to be updated + if (versionArray[0] === 9 && versionArray[1] >= 3) version = 2; // 9.3.0+ + else if (versionArray[0] === 8) version = 1; // 8.0.0+ + } + return version; +}; - if (options) { - if (typeof options.read === 'function') this._read = options.read; +module.exports.get_napi_version_as_string = function(target) { + // returns the napi version as a string or an empty string if napi is not supported. + var version = module.exports.get_napi_version(target); + return version ? ''+version : ''; +}; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } +module.exports.validate_package_json = function(package_json, opts) { // throws Error - Stream.call(this); -} + var binary = package_json.binary; + var module_path_ok = pathOK(binary.module_path); + var remote_path_ok = pathOK(binary.remote_path); + var package_name_ok = pathOK(binary.package_name); + var napi_build_versions = module.exports.get_napi_build_versions(package_json,opts,true); + var napi_build_versions_raw = module.exports.get_napi_build_versions_raw(package_json); -Object.defineProperty(Readable.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined) { - return false; - } - return this._readableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } + if (napi_build_versions) { + napi_build_versions.forEach(function(napi_build_version){ + if (!(parseInt(napi_build_version,10) === napi_build_version && napi_build_version > 0)) { + throw new Error("All values specified in napi_versions must be positive integers."); + } + }); + } - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - } -}); + if (napi_build_versions && (!module_path_ok || (!remote_path_ok && !package_name_ok))) { + throw new Error("When napi_versions is specified; module_path and either remote_path or " + + "package_name must contain the substitution string '{napi_build_version}`."); + } -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; -Readable.prototype._destroy = function (err, cb) { - this.push(null); - cb(err); -}; + if ((module_path_ok || remote_path_ok || package_name_ok) && !napi_build_versions_raw) { + throw new Error("When the substitution string '{napi_build_version}` is specified in " + + "module_path, remote_path, or package_name; napi_versions must also be specified."); + } -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; + if (napi_build_versions && !module.exports.get_best_napi_build_version(package_json, opts) && + module.exports.build_napi_only(package_json)) { + throw new Error( + 'The N-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' + + 'This module supports N-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. 
' + + 'This Node instance cannot run this module.'); + } - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } - skipChunkCheck = true; - } - } else { - skipChunkCheck = true; - } + if (napi_build_versions_raw && !napi_build_versions && module.exports.build_napi_only(package_json)) { + throw new Error( + 'The N-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' + + 'This module supports N-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' + + 'This Node instance cannot run this module.'); + } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); }; -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); +function pathOK (path) { + return path && (path.indexOf('{napi_build_version}') !== -1 || path.indexOf('{node_napi_label}') !== -1); +} + +module.exports.expand_commands = function(package_json, opts, commands) { + var expanded_commands = []; + var napi_build_versions = module.exports.get_napi_build_versions(package_json, opts); + commands.forEach(function(command){ + if (napi_build_versions && command.name === 'install') { + var napi_build_version = module.exports.get_best_napi_build_version(package_json, opts); + var args = napi_build_version ? [ napi_build_version_tag+napi_build_version ] : [ ]; + expanded_commands.push ({ name: command.name, args: args }); + } else if (napi_build_versions && napi_multiple_commands.indexOf(command.name) !== -1) { + napi_build_versions.forEach(function(napi_build_version){ + var args = command.args.slice(); + args.push (napi_build_version_tag+napi_build_version); + expanded_commands.push ({ name: command.name, args: args }); + }); + } else { + expanded_commands.push (command); + } + }); + return expanded_commands; }; -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } +module.exports.get_napi_build_versions = function(package_json, opts, warnings) { // opts may be undefined + var napi_build_versions = []; + var supported_napi_version = module.exports.get_napi_version(opts ? 
opts.target : undefined); + // remove duplicates, verify each napi version can actaully be built + if (package_json.binary && package_json.binary.napi_versions) { + package_json.binary.napi_versions.forEach(function(napi_version) { + var duplicated = napi_build_versions.indexOf(napi_version) !== -1; + if (!duplicated && supported_napi_version && napi_version <= supported_napi_version) { + napi_build_versions.push(napi_version); + } else if (warnings && !duplicated && supported_napi_version) { + log.info('This Node instance does not support builds for N-API version', napi_version); + } + }); + } + if (opts && opts["build-latest-napi-version-only"]) { + var latest_version = 0; + napi_build_versions.forEach(function(napi_version) { + if (napi_version > latest_version) latest_version = napi_version; + }); + napi_build_versions = latest_version ? [ latest_version ] : []; + } + return napi_build_versions.length ? napi_build_versions : undefined; +}; - if (addToFront) { - if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); - } else if (state.ended) { - stream.emit('error', new Error('stream.push() after EOF')); - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - } - } +module.exports.get_napi_build_versions_raw = function(package_json) { + var napi_build_versions = []; + // remove duplicates + if (package_json.binary && package_json.binary.napi_versions) { + package_json.binary.napi_versions.forEach(function(napi_version) { + if (napi_build_versions.indexOf(napi_version) === -1) { + napi_build_versions.push(napi_version); + } + }); + } + return napi_build_versions.length ? napi_build_versions : undefined; +}; - return needMoreData(state); -} +module.exports.get_command_arg = function(napi_build_version) { + return napi_build_version_tag + napi_build_version; +}; -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 
1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); +module.exports.get_napi_build_version_from_command_args = function(command_args) { + for (var i = 0; i < command_args.length; i++) { + var arg = command_args[i]; + if (arg.indexOf(napi_build_version_tag) === 0) { + return parseInt(arg.substr(napi_build_version_tag.length),10); + } + } + return undefined; +}; - if (state.needReadable) emitReadable(stream); - } - maybeReadMore(stream, state); -} +module.exports.swap_build_dir_out = function(napi_build_version) { + if (napi_build_version) { + rm.sync(module.exports.get_build_dir(napi_build_version)); + fs.renameSync('build', module.exports.get_build_dir(napi_build_version)); + } +}; -function chunkInvalid(state, chunk) { - var er; - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} +module.exports.swap_build_dir_in = function(napi_build_version) { + if (napi_build_version) { + rm.sync('build'); + fs.renameSync(module.exports.get_build_dir(napi_build_version), 'build'); + } +}; -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} +module.exports.get_build_dir = function(napi_build_version) { + return 'build-tmp-napi-v'+napi_build_version; +}; -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; +module.exports.get_best_napi_build_version = function(package_json, opts) { + var best_napi_build_version = 0; + var napi_build_versions = module.exports.get_napi_build_versions (package_json, opts); + if (napi_build_versions) { + var our_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined); + napi_build_versions.forEach(function(napi_build_version){ + if (napi_build_version > best_napi_build_version && + napi_build_version <= our_napi_version) { + best_napi_build_version = napi_build_version; + } + }); + } + return best_napi_build_version === 0 ? undefined : best_napi_build_version; }; -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = __nccwpck_require__(7545)/* .StringDecoder */ .s; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; +module.exports.build_napi_only = function(package_json) { + return package_json.binary && package_json.binary.package_name && + package_json.binary.package_name.indexOf('{node_napi_label}') === -1; }; -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} +/***/ }), -// This function is designed to be inlinable, so please take care when making -// changes to the function body. 
-function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough - if (!state.ended) { - state.needReadable = true; - return 0; - } - return state.length; -} +/***/ 10887: +/***/ ((module, exports, __nccwpck_require__) => { -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; +"use strict"; - if (n !== 0) state.emittedReadable = false; - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } +module.exports = exports; - n = howMuchToRead(n, state); +var path = __nccwpck_require__(85622); +var semver = __nccwpck_require__(2964); +var url = __nccwpck_require__(78835); +var detect_libc = __nccwpck_require__(34889); +var napi = __nccwpck_require__(10480); - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } +var abi_crosswalk; - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. +// This is used for unit testing to provide a fake +// ABI crosswalk that emulates one that is not updated +// for the current version +if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) { + abi_crosswalk = require(process.env.NODE_PRE_GYP_ABI_CROSSWALK); +} else { + abi_crosswalk = __nccwpck_require__(60282); +} - // if we need a readable event, then we need to do some reading. 
- var doRead = state.needReadable; - debug('need readable', doRead); +var major_versions = {}; +Object.keys(abi_crosswalk).forEach(function(v) { + var major = v.split('.')[0]; + if (!major_versions[major]) { + major_versions[major] = v; + } +}); - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } +function get_electron_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_electron_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if electron is the target."); + } + // Electron guarantees that patch version update won't break native modules. + var sem_ver = semver.parse(target_version); + return runtime + '-v' + sem_ver.major + '.' + sem_ver.minor; +} +module.exports.get_electron_abi = get_electron_abi; - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state); - } +function get_node_webkit_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_node_webkit_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if node-webkit is the target."); + } + return runtime + '-v' + target_version; +} +module.exports.get_node_webkit_abi = get_node_webkit_abi; - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; +function get_node_abi(runtime, versions) { + if (!runtime) { + throw new Error("get_node_abi requires valid runtime arg"); + } + if (!versions) { + throw new Error("get_node_abi requires valid process.versions object"); + } + var sem_ver = semver.parse(versions.node); + if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series + // https://github.com/mapbox/node-pre-gyp/issues/124 + return runtime+'-v'+versions.node; + } else { + // process.versions.modules added in >= v0.10.4 and v0.11.7 + // https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e + return versions.modules ? 
runtime+'-v' + (+versions.modules) : + 'v8-' + versions.v8.split('.').slice(0,2).join('.'); + } +} +module.exports.get_node_abi = get_node_abi; - if (ret === null) { - state.needReadable = true; - n = 0; - } else { - state.length -= n; - } +function get_runtime_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_runtime_abi requires valid runtime arg"); + } + if (runtime === 'node-webkit') { + return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']); + } else if (runtime === 'electron') { + return get_electron_abi(runtime, target_version || process.versions.electron); + } else { + if (runtime != 'node') { + throw new Error("Unknown Runtime: '" + runtime + "'"); + } + if (!target_version) { + return get_node_abi(runtime,process.versions); + } else { + var cross_obj; + // abi_crosswalk generated with ./scripts/abi_crosswalk.js + if (abi_crosswalk[target_version]) { + cross_obj = abi_crosswalk[target_version]; + } else { + var target_parts = target_version.split('.').map(function(i) { return +i; }); + if (target_parts.length != 3) { // parse failed + throw new Error("Unknown target version: " + target_version); + } + /* + The below code tries to infer the last known ABI compatible version + that we have recorded in the abi_crosswalk.json when an exact match + is not possible. The reasons for this to exist are complicated: - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; + - We support passing --target to be able to allow developers to package binaries for versions of node + that are not the same one as they are running. This might also be used in combination with the + --target_arch or --target_platform flags to also package binaries for alternative platforms + - When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node + version that is running in memory + - So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look + this info up for all versions + - But we cannot easily predict what the future ABI will be for released versions + - And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly + by being fully available at install time. + - So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release + need to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if + you want the `--target` flag to keep working for the latest version + - Which is impractical ^^ + - Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding. - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); - } + In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that + only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be + ABI compatible with v0.10.33). 
- if (ret !== null) this.emit('data', ret); + TODO: use semver module instead of custom version parsing + */ + var major = target_parts[0]; + var minor = target_parts[1]; + var patch = target_parts[2]; + // io.js: yeah if node.js ever releases 1.x this will break + // but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616 + if (major === 1) { + // look for last release that is the same major version + // e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0 + while (true) { + if (minor > 0) --minor; + if (patch > 0) --patch; + var new_iojs_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_iojs_target]) { + cross_obj = abi_crosswalk[new_iojs_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_iojs_target + ' as ABI compatible target'); + break; + } + if (minor === 0 && patch === 0) { + break; + } + } + } else if (major >= 2) { + // look for last release that is the same major version + if (major_versions[major]) { + cross_obj = abi_crosswalk[major_versions[major]]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + major_versions[major] + ' as ABI compatible target'); + } + } else if (major === 0) { // node.js + if (target_parts[1] % 2 === 0) { // for stable/even node.js series + // look for the last release that is the same minor release + // e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0 + while (--patch > 0) { + var new_node_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_node_target]) { + cross_obj = abi_crosswalk[new_node_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_node_target + ' as ABI compatible target'); + break; + } + } + } + } + } + if (!cross_obj) { + throw new Error("Unsupported target version: " + target_version); + } + // emulate process.versions + var versions_obj = { + node: target_version, + v8: cross_obj.v8+'.0', + // abi_crosswalk uses 1 for node versions lacking process.versions.modules + // process.versions.modules added in >= v0.10.4 and v0.11.7 + modules: cross_obj.node_abi > 1 ? cross_obj.node_abi : undefined + }; + return get_node_abi(runtime, versions_obj); + } + } +} +module.exports.get_runtime_abi = get_runtime_abi; - return ret; -}; +var required_parameters = [ + 'module_name', + 'module_path', + 'host' +]; -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; +function validate_config(package_json,opts) { + var msg = package_json.name + ' package.json is not node-pre-gyp ready:\n'; + var missing = []; + if (!package_json.main) { + missing.push('main'); } - } - state.ended = true; - - // emit 'readable' now to make sure it gets picked up. 
- emitReadable(stream); + if (!package_json.version) { + missing.push('version'); + } + if (!package_json.name) { + missing.push('name'); + } + if (!package_json.binary) { + missing.push('binary'); + } + var o = package_json.binary; + required_parameters.forEach(function(p) { + if (missing.indexOf('binary') > -1) { + missing.pop('binary'); + } + if (!o || o[p] === undefined || o[p] === "") { + missing.push('binary.' + p); + } + }); + if (missing.length >= 1) { + throw new Error(msg+"package.json must declare these properties: \n" + missing.join('\n')); + } + if (o) { + // enforce https over http + var protocol = url.parse(o.host).protocol; + if (protocol === 'http:') { + throw new Error("'host' protocol ("+protocol+") is invalid - only 'https:' is accepted"); + } + } + napi.validate_package_json(package_json,opts); } -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); - } +module.exports.validate_config = validate_config; + +function eval_template(template,opts) { + Object.keys(opts).forEach(function(key) { + var pattern = '{'+key+'}'; + while (template.indexOf(pattern) > -1) { + template = template.replace(pattern,opts[key]); + } + }); + return template; } -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); +// url.resolve needs single trailing slash +// to behave correctly, otherwise a double slash +// may end up in the url which breaks requests +// and a lacking slash may not lead to proper joining +function fix_slashes(pathname) { + if (pathname.slice(-1) != '/') { + return pathname + '/'; + } + return pathname; } -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - pna.nextTick(maybeReadMore_, stream, state); - } +// remove double slashes +// note: path.normalize will not work because +// it will convert forward to back slashes +function drop_double_slashes(pathname) { + return pathname.replace(/\/\//g,'/'); } -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; - } - state.readingMore = false; +function get_process_runtime(versions) { + var runtime = 'node'; + if (versions['node-webkit']) { + runtime = 'node-webkit'; + } else if (versions.electron) { + runtime = 'electron'; + } + return runtime; } -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. 
-Readable.prototype._read = function (n) { - this.emit('error', new Error('_read() is not implemented')); -}; +module.exports.get_process_runtime = get_process_runtime; -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; +var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz'; +var default_remote_path = ''; - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); +module.exports.evaluate = function(package_json,options,napi_build_version) { + options = options || {}; + validate_config(package_json,options); // options is a suitable substitute for opts in this case + var v = package_json.version; + var module_version = semver.parse(v); + var runtime = options.runtime || get_process_runtime(process.versions); + var opts = { + name: package_json.name, + configuration: Boolean(options.debug) ? 'Debug' : 'Release', + debug: options.debug, + module_name: package_json.binary.module_name, + version: module_version.version, + prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '', + build: module_version.build.length ? module_version.build.join('.') : '', + major: module_version.major, + minor: module_version.minor, + patch: module_version.patch, + runtime: runtime, + node_abi: get_runtime_abi(runtime,options.target), + node_abi_napi: napi.get_napi_version(options.target) ? 'napi' : get_runtime_abi(runtime,options.target), + napi_version: napi.get_napi_version(options.target), // non-zero numeric, undefined if unsupported + napi_build_version: napi_build_version || '', + node_napi_label: napi_build_version ? 'napi-v' + napi_build_version : get_runtime_abi(runtime,options.target), + target: options.target || '', + platform: options.target_platform || process.platform, + target_platform: options.target_platform || process.platform, + arch: options.target_arch || process.arch, + target_arch: options.target_arch || process.arch, + libc: options.target_libc || detect_libc.family || 'unknown', + module_main: package_json.main, + toolset : options.toolset || '' // address https://github.com/mapbox/node-pre-gyp/issues/119 + }; + // support host mirror with npm config `--{module_name}_binary_host_mirror` + // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25 + // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ + var host = process.env['npm_config_' + opts.module_name + '_binary_host_mirror'] || package_json.binary.host; + opts.host = fix_slashes(eval_template(host,opts)); + opts.module_path = eval_template(package_json.binary.module_path,opts); + // now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably + if (options.module_root) { + // resolve relative to known module root: works for pre-binding require + opts.module_path = path.join(options.module_root,opts.module_path); + } else { + // resolve relative to current working directory: works for node-pre-gyp commands + opts.module_path = path.resolve(opts.module_path); + } + opts.module = path.join(opts.module_path,opts.module_name + '.node'); + opts.remote_path = package_json.binary.remote_path ? 
drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path,opts))) : default_remote_path; + var package_name = package_json.binary.package_name ? package_json.binary.package_name : default_package_name; + opts.package_name = eval_template(package_name,opts); + opts.staged_tarball = path.join('build/stage',opts.remote_path,opts.package_name); + opts.hosted_path = url.resolve(opts.host,opts.remote_path); + opts.hosted_tarball = url.resolve(opts.hosted_path,opts.package_name); + return opts; +}; - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); +/***/ }), - dest.on('unpipe', onunpipe); - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } +/***/ 41743: +/***/ ((module, exports, __nccwpck_require__) => { - function onend() { - debug('onend'); - dest.end(); - } +// info about each config option. - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); +var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + ? function () { console.error.apply(console, arguments) } + : function () {} - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); +var url = __nccwpck_require__(78835) + , path = __nccwpck_require__(85622) + , Stream = __nccwpck_require__(92413).Stream + , abbrev = __nccwpck_require__(48566) + , osenv = __nccwpck_require__(84669) - cleanedUp = true; +module.exports = exports = nopt +exports.clean = clean - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); +exports.typeDefs = + { String : { type: String, validate: validateString } + , Boolean : { type: Boolean, validate: validateBoolean } + , url : { type: url, validate: validateUrl } + , Number : { type: Number, validate: validateNumber } + , path : { type: path, validate: validatePath } + , Stream : { type: Stream, validate: validateStream } + , Date : { type: Date, validate: validateDate } } - // If the user pushes more data while we're writing to dest then we'll end up - // in ondata again. However, we only want to increase awaitDrain once because - // dest will only emit one 'drain' event for the multiple writes. - // => Introduce a guard on increasing awaitDrain. 
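For orientation, a rough sketch of how the vendored nopt parser defined in this module is typically invoked, assuming the nopt(types, shorthands, args, slice) signature shown below; the option names and argument values are made up for illustration:

var parsed = nopt(
  { port: Number, verbose: Boolean },   // types: allowed value types per option
  { v: ['--verbose'] },                 // shorthands: -v expands to --verbose
  ['--port', '8080', '-v', 'extra'],    // args to parse
  0                                     // slice: start at index 0 (no node/script prefix)
)
// parsed.port === 8080, parsed.verbose === true
// unrecognized positionals end up in parsed.argv.remain, here ['extra']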
- var increasedAwaitDrain = false; - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - increasedAwaitDrain = false; - var ret = dest.write(chunk); - if (false === ret && !increasedAwaitDrain) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; - increasedAwaitDrain = true; +function nopt (types, shorthands, args, slice) { + args = args || process.argv + types = types || {} + shorthands = shorthands || {} + if (typeof slice !== "number") slice = 2 + + debug(types, shorthands, args, slice) + + args = args.slice(slice) + var data = {} + , key + , argv = { + remain: [], + cooked: args, + original: args.slice(0) } - src.pause(); - } - } - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } + parse(args, data, argv.remain, types, shorthands) + // now data is full + clean(data, types, exports.typeDefs) + data.argv = argv + Object.defineProperty(data.argv, 'toString', { value: function () { + return this.original.map(JSON.stringify).join(" ") + }, enumerable: false }) + return data +} - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); +function clean (data, types, typeDefs) { + typeDefs = typeDefs || exports.typeDefs + var remove = {} + , typeDefault = [false, true, null, String, Array] - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); + Object.keys(data).forEach(function (k) { + if (k === "argv") return + var val = data[k] + , isArray = Array.isArray(val) + , type = types[k] + if (!isArray) val = [val] + if (!type) type = typeDefault + if (type === Array) type = typeDefault.concat(Array) + if (!Array.isArray(type)) type = [type] - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } + debug("val=%j", val) + debug("types=", type) + val = val.map(function (val) { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof val === "string") { + debug("string %j", val) + val = val.trim() + if ((val === "null" && ~type.indexOf(null)) + || (val === "true" && + (~type.indexOf(true) || ~type.indexOf(Boolean))) + || (val === "false" && + (~type.indexOf(false) || ~type.indexOf(Boolean)))) { + val = JSON.parse(val) + debug("jsonable %j", val) + } else if (~type.indexOf(Number) && !isNaN(val)) { + debug("convert to number", val) + val = +val + } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { + debug("convert to date", val) + val = new Date(val) + } + } - // tell the dest that it's being piped to - dest.emit('pipe', src); + if (!types.hasOwnProperty(k)) { + return val + } - // start the flow if it hasn't been started already. 
- if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } + // allow `--no-blah` to set 'blah' to null if null is allowed + if (val === false && ~type.indexOf(null) && + !(~type.indexOf(false) || ~type.indexOf(Boolean))) { + val = null + } - return dest; -}; + var d = {} + d[k] = val + debug("prevalidated val", d, val, types[k]) + if (!validate(d, k, val, types[k], typeDefs)) { + if (exports.invalidHandler) { + exports.invalidHandler(k, val, types[k], data) + } else if (exports.invalidHandler !== false) { + debug("invalid: "+k+"="+val, types[k]) + } + return remove + } + debug("validated val", d, val, types[k]) + return d[k] + }).filter(function (val) { return val !== remove }) -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); + // if we allow Array specifically, then an empty array is how we + // express 'no value here', not null. Allow it. + if (!val.length && type.indexOf(Array) === -1) { + debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(Array)) + delete data[k] } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { hasUnpiped: false }; - - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; + else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else data[k] = val[0] - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; + debug("k=%s val=%j", k, val, data[k]) + }) +} - if (!dest) dest = state.pipes; +function validateString (data, k, val) { + data[k] = String(val) +} - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; - } +function validatePath (data, k, val) { + if (val === true) return false + if (val === null) return true - // slow case. multiple pipe destinations. + val = String(val) - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; + var isWin = process.platform === 'win32' + , homePattern = isWin ? /^~(\/|\\)/ : /^~\// + , home = osenv.home() - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, unpipeInfo); - }return this; + if (home && val.match(homePattern)) { + data[k] = path.resolve(home, val.substr(2)) + } else { + data[k] = path.resolve(val) } + return true +} - // try to find the right one. 
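A small hypothetical illustration of the path validator above: a leading '~/' is expanded against osenv.home(), and anything else is resolved to an absolute path.

var data = {}
validatePath(data, 'out', '~/builds')   // data.out === path.resolve(osenv.home(), 'builds')
validatePath(data, 'out', 'dist/pkg')   // data.out === path.resolve('dist/pkg')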
- var index = indexOf(state.pipes, dest); - if (index === -1) return this; +function validateNumber (data, k, val) { + debug("validate Number %j %j %j", k, val, isNaN(val)) + if (isNaN(val)) return false + data[k] = +val +} - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; +function validateDate (data, k, val) { + var s = Date.parse(val) + debug("validate Date %j %j %j", k, val, s) + if (isNaN(s)) return false + data[k] = new Date(val) +} - dest.emit('unpipe', this, unpipeInfo); +function validateBoolean (data, k, val) { + if (val instanceof Boolean) val = val.valueOf() + else if (typeof val === "string") { + if (!isNaN(val)) val = !!(+val) + else if (val === "null" || val === "false") val = false + else val = true + } else val = !!val + data[k] = val +} - return this; -}; +function validateUrl (data, k, val) { + val = url.parse(String(val)) + if (!val.host) return false + data[k] = val.href +} -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); +function validateStream (data, k, val) { + if (!(val instanceof Stream)) return false + data[k] = val +} - if (ev === 'data') { - // Start flowing on next tick if stream isn't explicitly paused - if (this._readableState.flowing !== false) this.resume(); - } else if (ev === 'readable') { - var state = this._readableState; - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.emittedReadable = false; - if (!state.reading) { - pna.nextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this); - } +function validate (data, k, val, type, typeDefs) { + // arrays are lists of types. + if (Array.isArray(type)) { + for (var i = 0, l = type.length; i < l; i ++) { + if (type[i] === Array) continue + if (validate(data, k, val, type[i], typeDefs)) return true } + delete data[k] + return false } - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} + // an array of anything? + if (type === Array) return true -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); + // NaN is poisonous. Means that something is not allowed. + if (type !== type) { + debug("Poison NaN", k, val, type) + delete data[k] + return false } - return this; -}; -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - pna.nextTick(resume_, stream, state); + // explicit list of values + if (val === type) { + debug("Explicitly allowed %j", val) + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + return true } -} -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); + // now go through the list of typeDefs, validate against each one. + var ok = false + , types = Object.keys(typeDefs) + for (var i = 0, l = types.length; i < l; i ++) { + debug("test type %j %j %j", k, val, types[i]) + var t = typeDefs[types[i]] + if (t && + ((type && type.name && t.type && t.type.name) ? 
(type.name === t.type.name) : (type === t.type))) { + var d = {} + ok = false !== t.validate(d, k, val) + val = d[k] + if (ok) { + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + break + } + } } + debug("OK? %j (%j %j %j)", ok, k, val, types[i]) - state.resumeScheduled = false; - state.awaitDrain = 0; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); + if (!ok) delete data[k] + return ok } -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null) {} -} +function parse (args, data, remain, types, shorthands) { + debug("parse", args, data, remain) -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var _this = this; + var key = null + , abbrevs = abbrev(Object.keys(types)) + , shortAbbr = abbrev(Object.keys(shorthands)) - var state = this._readableState; - var paused = false; + for (var i = 0; i < args.length; i ++) { + var arg = args[i] + debug("arg", arg) - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. + remain.push.apply(remain, args.slice(i + 1)) + args[i] = "--" + break } + var hadEq = false + if (arg.charAt(0) === "-" && arg.length > 1) { + var at = arg.indexOf('=') + if (at > -1) { + hadEq = true + var v = arg.substr(at + 1) + arg = arg.substr(0, at) + args.splice(i, 1, arg, v) + } - _this.push(null); - }); - - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); - - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = _this.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); + // see if it's a shorthand + // if so, splice and back up to re-parse it. + var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) + debug("arg=%j shRes=%j", arg, shRes) + if (shRes) { + debug(arg, shRes) + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i -- + continue + } + } + arg = arg.replace(/^-+/, "") + var no = null + while (arg.toLowerCase().indexOf("no-") === 0) { + no = !no + arg = arg.substr(3) + } - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } + if (abbrevs[arg]) arg = abbrevs[arg] - // proxy certain important events. 
- for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } + var argType = types[arg] + var isTypeArray = Array.isArray(argType) + if (isTypeArray && argType.length === 1) { + isTypeArray = false + argType = argType[0] + } - // when we try to consume some more bytes, simply unpause the - // underlying stream. - this._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; + var isArray = argType === Array || + isTypeArray && argType.indexOf(Array) !== -1 - return this; -}; + // allow unknown things to be arrays if specified multiple times. + if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { + if (!Array.isArray(data[arg])) + data[arg] = [data[arg]] + isArray = true + } -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._readableState.highWaterMark; - } -}); + var val + , la = args[i + 1] -// exposed for testing purposes only. -Readable._fromList = fromList; + var isBool = typeof no === 'boolean' || + argType === Boolean || + isTypeArray && argType.indexOf(Boolean) !== -1 || + (typeof argType === 'undefined' && !hadEq) || + (la === "false" && + (argType === null || + isTypeArray && ~argType.indexOf(null))) -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === "true" || la === "false") { + val = JSON.parse(la) + la = null + if (no) val = !val + i ++ + } - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = fromListPartial(n, state.buffer, state.decoder); - } + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (isTypeArray && la) { + if (~argType.indexOf(la)) { + // an explicit type + val = la + i ++ + } else if ( la === "null" && ~argType.indexOf(null) ) { + // null allowed + val = null + i ++ + } else if ( !la.match(/^-{2,}[^-]/) && + !isNaN(la) && + ~argType.indexOf(Number) ) { + // number + val = +la + i ++ + } else if ( !la.match(/^-[^-]/) && ~argType.indexOf(String) ) { + // string + val = la + i ++ + } + } - return ret; -} + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val -// Extracts only enough buffered data to satisfy the amount requested. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. 
-function fromListPartial(n, list, hasStrings) { - var ret; - if (n < list.head.data.length) { - // slice is the same for buffers and strings - ret = list.head.data.slice(0, n); - list.head.data = list.head.data.slice(n); - } else if (n === list.head.data.length) { - // first chunk is a perfect match - ret = list.shift(); - } else { - // result spans more than one buffer - ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); - } - return ret; -} + continue + } -// Copies a specified amount of characters from the list of buffered data -// chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBufferString(n, list) { - var p = list.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = str.slice(nb); + if (argType === String) { + if (la === undefined) { + la = "" + } else if (la.match(/^-{1,2}[^-]+/)) { + la = "" + i -- + } } - break; - } - ++c; - } - list.length -= c; - return ret; -} -// Copies a specified amount of bytes from the list of buffered data chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBuffer(n, list) { - var ret = Buffer.allocUnsafe(n); - var p = list.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = buf.slice(nb); + if (la && la.match(/^-{2,}$/)) { + la = undefined + i -- } - break; + + val = la === undefined ? true : la + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + i ++ + continue } - ++c; + remain.push(arg) } - list.length -= c; - return ret; } -function endReadable(stream) { - var state = stream._readableState; +function resolveShort (arg, shorthands, shortAbbr, abbrevs) { + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. + arg = arg.replace(/^-+/, '') - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. - if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) + return null - if (!state.endEmitted) { - state.ended = true; - pna.nextTick(endReadableNT, state, stream); - } -} + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. 
- if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); + return shorthands[arg] } -} -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; + // first check to see if this arg is a set of single-char shorthands + var singles = shorthands.___singles + if (!singles) { + singles = Object.keys(shorthands).filter(function (s) { + return s.length === 1 + }).reduce(function (l,r) { + l[r] = true + return l + }, {}) + shorthands.___singles = singles + debug('shorthand singles', singles) } - return -1; -} -/***/ }), + var chrs = arg.split("").filter(function (c) { + return singles[c] + }) -/***/ 6500: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (chrs.join("") === arg) return chrs.map(function (c) { + return shorthands[c] + }).reduce(function (l, r) { + return l.concat(r) + }, []) -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. 
-// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) + return null + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) + arg = shortAbbr[arg] + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) -module.exports = Transform; + return shorthands[arg] +} -var Duplex = __nccwpck_require__(26637); -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ +/***/ }), -util.inherits(Transform, Duplex); +/***/ 99120: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; +module.exports = rimraf +rimraf.sync = rimrafSync - var cb = ts.writecb; +var assert = __nccwpck_require__(42357) +var path = __nccwpck_require__(85622) +var fs = __nccwpck_require__(35747) +var glob = undefined +try { + glob = __nccwpck_require__(91957) +} catch (_err) { + // treat glob as optional. 
+} +var _0666 = parseInt('666', 8) - if (!cb) { - return this.emit('error', new Error('write callback called multiple times')); - } +var defaultGlobOpts = { + nosort: true, + silent: true +} - ts.writechunk = null; - ts.writecb = null; +// for EMFILE handling +var timeout = 0 - if (data != null) // single equals check for both `null` and `undefined` - this.push(data); +var isWindows = (process.platform === "win32") - cb(er); +function defaults (options) { + var methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(function(m) { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) - var rs = this._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true } + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts } -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); +function rimraf (p, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } - Duplex.call(this, options); + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; + defaults(options) - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; + var busyTries = 0 + var errState = null + var n = 0 - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. - this._readableState.sync = false; + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; + options.lstat(p, function (er, stat) { + if (!er) + return afterGlob(null, [p]) - if (typeof options.flush === 'function') this._flush = options.flush; + glob(p, options.glob, afterGlob) + }) + + function next (er) { + errState = errState || er + if (--n === 0) + cb(errState) } - // When the writable side finishes, then flush out anything remaining. - this.on('prefinish', prefinish); -} + function afterGlob (er, results) { + if (er) + return cb(er) -function prefinish() { - var _this = this; + n = results.length + if (n === 0) + return cb() - if (typeof this._flush === 'function') { - this._flush(function (er, data) { - done(_this, er, data); - }); - } else { - done(this, null, null); + results.forEach(function (p) { + rimraf_(p, options, function CB (er) { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + var time = busyTries * 100 + // try again, with the same exact callback as this one. 
+ return setTimeout(function () { + rimraf_(p, options, CB) + }, time) + } + + // this one won't happen if graceful-fs is used. + if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(function () { + rimraf_(p, options, CB) + }, timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + }) + }) } } -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; - -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR // -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. // -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('_transform() is not implemented'); -}; +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, function (er, st) { + if (er && er.code === "ENOENT") + return cb(null) -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) -Transform.prototype._destroy = function (err, cb) { - var _this2 = this; + options.unlink(p, function (er) { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? 
fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - _this2.emit('close'); - }); -}; +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + if (er) + assert(er instanceof Error) -function done(stream, er, data) { - if (er) return stream.emit('error', er); + options.chmod(p, _0666, function (er2) { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, function(er3, stats) { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); +function fixWinEPERMSync (p, options, er) { + assert(p) + assert(options) + if (er) + assert(er instanceof Error) - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + try { + options.chmodSync(p, _0666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } - if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + try { + var stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } - return stream.push(null); + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) } -/***/ }), +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + assert(typeof cb === 'function') -/***/ 81008: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. + options.rmdir(p, function (er) { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
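As a sketch of how the vendored rimraf above is driven (the path here is hypothetical), the async form takes a callback and the sync form blocks the thread:

rimraf('/tmp/example-build-dir', {}, function (err) {
  if (err) throw err
  // the directory and its contents are gone (a missing path counts as success)
})

// synchronous variant, exported as rimraf.sync
rimraf.sync('/tmp/example-build-dir')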
+function rmkids(p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. + options.readdir(p, function (er, files) { + if (er) + return cb(er) + var n = files.length + if (n === 0) + return options.rmdir(p, cb) + var errState + files.forEach(function (f) { + rimraf(path.join(p, f), options, function (er) { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + var results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + if (!results.length) + return + for (var i = 0; i < results.length; i++) { + var p = results[i] -/**/ + try { + var st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return -var pna = __nccwpck_require__(47810); -/**/ + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } -module.exports = Writable; + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er -/* */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; + rmdirSync(p, options, er) + } + } } -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) - this.next = null; - this.entry = null; - this.finish = function () { - onCorkedFinish(_this, state); - }; + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } } -/* */ - -/**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; -/**/ -/**/ -var Duplex; -/**/ +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(function (f) { + rimrafSync(path.join(p, f), options) + }) -Writable.WritableState = WritableState; + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. 
+ // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + var retries = isWindows ? 100 : 1 + var i = 0 + do { + var threw = true + try { + var ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) +} -/**/ -var util = Object.create(__nccwpck_require__(95898)); -util.inherits = __nccwpck_require__(44124); -/**/ -/**/ -var internalUtil = { - deprecate: __nccwpck_require__(65278) -}; -/**/ +/***/ }), -/**/ -var Stream = __nccwpck_require__(78743); -/**/ +/***/ 2964: +/***/ ((module, exports) => { -/**/ +exports = module.exports = SemVer -var Buffer = __nccwpck_require__(1206).Buffer; -var OurUint8Array = global.Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} } -/**/ +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' -var destroyImpl = __nccwpck_require__(13166); +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 -util.inherits(Writable, Stream); +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 -function nop() {} +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var R = 0 -function WritableState(options, stream) { - Duplex = Duplex || __nccwpck_require__(26637); +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. - options = options || {}; +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; +var NUMERICIDENTIFIER = R++ +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' +var NUMERICIDENTIFIERLOOSE = R++ +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; +var NONNUMERICIDENTIFIER = R++ +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var writableHwm = options.writableHighWaterMark; - var defaultHwm = this.objectMode ? 
16 : 16 * 1024; +// ## Main Version +// Three dot-separated numeric identifiers. - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; +var MAINVERSION = R++ +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')' - // cast to ints. - this.highWaterMark = Math.floor(this.highWaterMark); +var MAINVERSIONLOOSE = R++ +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')' - // if _final has been called - this.finalCalled = false; +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. - // drain event flag. - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; +var PRERELEASEIDENTIFIER = R++ +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')' - // has it been destroyed - this.destroyed = false; +var PRERELEASEIDENTIFIERLOOSE = R++ +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')' - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; +var PRERELEASE = R++ +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; +var PRERELEASELOOSE = R++ +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' - // a flag to see when we're in the middle of a write. - this.writing = false; +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. - // when true all writes will be buffered until .uncork() call - this.corked = 0; +var BUILDIDENTIFIER = R++ +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; +var BUILD = R++ +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' 
+ src[BUILDIDENTIFIER] + ')*))' - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. - // the amount that is being written when _write is called. - this.writelen = 0; +var FULL = R++ +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?' - this.bufferedRequest = null; - this.lastBufferedRequest = null; +src[FULL] = '^' + FULLPLAIN + '$' - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?' - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; +var LOOSE = R++ +src[LOOSE] = '^' + LOOSEPLAIN + '$' - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; +var GTLT = R++ +src[GTLT] = '((?:<|>)?=?)' - // count buffered requests - this.bufferedRequestCount = 0; +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++ +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +var XRANGEIDENTIFIER = R++ +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' - // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - this.corkedRequestsFree = new CorkedRequest(this); -} +var XRANGEPLAIN = R++ +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?' -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; +var XRANGEPLAINLOOSE = R++ +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?' -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); +var XRANGE = R++ +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' +var XRANGELOOSE = R++ +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' -// Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. 
-var realHasInstance; -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function (object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +var COERCE = R++ +src[COERCE] = '(?:^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' - return object && object._writableState instanceof WritableState; - } - }); -} else { - realHasInstance = function (object) { - return object instanceof this; - }; -} +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++ +src[LONETILDE] = '(?:~>?)' -function Writable(options) { - Duplex = Duplex || __nccwpck_require__(26637); +var TILDETRIM = R++ +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +var tildeTrimReplace = '$1~' - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. +var TILDE = R++ +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' +var TILDELOOSE = R++ +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { - return new Writable(options); - } +// Caret ranges. +// Meaning is "at least and backwards compatible with" +var LONECARET = R++ +src[LONECARET] = '(?:\\^)' - this._writableState = new WritableState(options, this); +var CARETTRIM = R++ +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +var caretTrimReplace = '$1^' - // legacy. 
- this.writable = true; +var CARET = R++ +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' +var CARETLOOSE = R++ +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' - if (options) { - if (typeof options.write === 'function') this._write = options.write; +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++ +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' +var COMPARATOR = R++ +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' - if (typeof options.writev === 'function') this._writev = options.writev; +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++ +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' - if (typeof options.destroy === 'function') this._destroy = options.destroy; +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' - if (typeof options.final === 'function') this._final = options.final; - } +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +var HYPHENRANGE = R++ +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$' - Stream.call(this); -} +var HYPHENRANGELOOSE = R++ +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$' -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe, not readable')); -}; +// Star ranges basically just allow anything at all. +var STAR = R++ +src[STAR] = '(<|>)?=?\\s*\\*' -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - pna.nextTick(cb, er); +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } } -// Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. 
-function validChunk(stream, state, chunk, cb) { - var valid = true; - var er = false; +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } - if (chunk === null) { - er = new TypeError('May not write null values to stream'); - } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); + if (version instanceof SemVer) { + return version } - if (er) { - stream.emit('error', er); - pna.nextTick(cb, er); - valid = false; + + if (typeof version !== 'string') { + return null } - return valid; -} -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - var isBuf = !state.objectMode && _isUint8Array(chunk); + if (version.length > MAX_LENGTH) { + return null + } - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); + var r = options.loose ? re[LOOSE] : re[FULL] + if (!r.test(version)) { + return null } - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; + try { + return new SemVer(version, options) + } catch (er) { + return null } +} - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} - if (typeof cb !== 'function') cb = nop; +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} - if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); - } +exports.SemVer = SemVer - return ret; -}; +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } -Writable.prototype.cork = function () { - var state = this._writableState; + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } - state.corked++; -}; + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } -Writable.prototype.uncork = function () { - var state = this._writableState; + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose - if (state.corked) { - state.corked--; + var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) - if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + if (!m) { + throw new TypeError('Invalid Version: ' + version) } -}; -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. 
- if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; + this.raw = version -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); - } - return chunk; -} + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') } -}); -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') } - var len = state.objectMode ? 1 : chunk.length; - - state.length += len; - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. - if (!ret) state.needDrain = true; + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] } else { - doWrite(stream, state, false, len, chunk, encoding, cb); + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) } - return ret; + this.build = m[5] ? m[5].split('.') : [] + this.format() } -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version } -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; +SemVer.prototype.toString = function () { + return this.version +} - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - pna.nextTick(cb, er); - // this can emit finish, and it will always happen - // after error - pna.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - // this can emit finish, but finish must - // always follow error - finishMaybe(stream, state); +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) } -} -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; + return this.compareMain(other) || this.comparePre(other) } -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } - onwriteStateUpdate(state); + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } - if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue } else { - afterWrite(stream, state, finished, cb); + return compareIdentifiers(a, b) } - } + } while (++i) } -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. 
+SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) } + this.format() + this.raw = this.version + return this } -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - - var count = 0; - var allBuffers = true; - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - buffer.allBuffers = allBuffers; +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } - doWrite(stream, state, true, state.length, buffer, '', holder.finish); + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} - // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - state.bufferedRequestCount = 0; +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - if (state.writing) { - break; + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } } } + return defaultResult // may be undefined + } +} - if (entry === null) state.lastBufferedRequest = null; +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b } - state.bufferedRequest = entry; - state.bufferProcessing = false; + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 } -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('_write() is not implemented')); -}; +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} -Writable.prototype._writev = null; +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); - } +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} - // ignore unnecessary end() calls. - if (!state.ending && !state.finished) endWritable(this, state, cb); -}; +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compare(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.rcompare(a, b, loose) + }) } -function callFinal(stream, state) { - stream._final(function (err) { - state.pendingcb--; - if (err) { - stream.emit('error', err); - } - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 } -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function') { - state.pendingcb++; - state.finalCalled = true; - pna.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); - } - } + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 } -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - prefinish(stream, state); - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - } - } - return need; +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 } -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 } -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = corkReq; - } else { - state.corkedRequestsFree = corkReq; - } +exports.gte = gte +function gte (a, b, loose) { + return 
compare(a, b, loose) >= 0 } -Object.defineProperty(Writable.prototype, 'destroyed', { - get: function () { - if (this._writableState === undefined) { - return false; - } - return this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; - } +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} - // backward compatibility, the user is explicitly - // managing destroyed - this._writableState.destroyed = value; - } -}); +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; -Writable.prototype._destroy = function (err, cb) { - this.end(); - cb(err); -}; + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b -/***/ }), + case '': + case '=': + case '==': + return eq(a, b, loose) -/***/ 42480: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + case '!=': + return neq(a, b, loose) -"use strict"; + case '>': + return gt(a, b, loose) + case '>=': + return gte(a, b, loose) -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + case '<': + return lt(a, b, loose) -var Buffer = __nccwpck_require__(1206).Buffer; -var util = __nccwpck_require__(31669); + case '<=': + return lte(a, b, loose) -function copyBuffer(src, target, offset) { - src.copy(target, offset); + default: + throw new TypeError('Invalid operator: ' + op) + } } -module.exports = function () { - function BufferList() { - _classCallCheck(this, BufferList); +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } - this.head = null; - this.tail = null; - this.length = 0; + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } } - BufferList.prototype.push = function push(v) { - var entry = { data: v, next: null }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - }; + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } - BufferList.prototype.unshift = function unshift(v) { - var entry = { data: v, next: this.head }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - }; + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) - BufferList.prototype.shift = function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - }; + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } - BufferList.prototype.clear = function clear() { - this.head = this.tail = null; - this.length = 0; - }; + debug('comp', this) +} - BufferList.prototype.join = function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) { - ret += s + p.data; - }return ret; - }; +var ANY = {} 
+Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var m = comp.match(r) - BufferList.prototype.concat = function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - if (this.length === 1) return this.head.data; - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; - }; + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } - return BufferList; -}(); + this.operator = m[1] + if (this.operator === '=') { + this.operator = '' + } -if (util && util.inspect && util.inspect.custom) { - module.exports.prototype[util.inspect.custom] = function () { - var obj = util.inspect({ length: this.length }); - return this.constructor.name + ' ' + obj; - }; + // if it literally is just '>' or '' then allow anything. + if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } } -/***/ }), - -/***/ 13166: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; +Comparator.prototype.toString = function () { + return this.value +} +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) -/**/ + if (this.semver === ANY) { + return true + } -var pna = __nccwpck_require__(47810); -/**/ + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } -// undocumented cb() API, needed for core, not for public API -function destroy(err, cb) { - var _this = this; + return cmp(version, this.operator, this.semver, this.options) +} - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { - pna.nextTick(emitErrorNT, this, err); + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } - return this; } - // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks + var rangeTmp - if (this._readableState) { - this._readableState.destroyed = true; + if (this.operator === '') { + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) } - // if this is a duplex stream mark the writable part as destroyed as well - if (this._writableState) { - this._writableState.destroyed = true; + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || 
this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } } - this._destroy(err || null, function (err) { - if (!cb && err) { - pna.nextTick(emitErrorNT, _this, err); - if (_this._writableState) { - _this._writableState.errorEmitted = true; - } - } else if (cb) { - cb(err); + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) } - }); + } - return this; -} + if (range instanceof Comparator) { + return new Range(range.value, options) + } -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; + if (!(this instanceof Range)) { + return new Range(range, options) } - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) } + + this.format() } -function emitErrorNT(self, err) { - self.emit('error', err); +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range } -module.exports = { - destroy: destroy, - undestroy: undestroy -}; +Range.prototype.toString = function () { + return this.range +} -/***/ }), +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[COMPARATORTRIM]) -/***/ 78743: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace) -module.exports = __nccwpck_require__(92413); + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace) + // normalize spaces + range = range.split(/\s+/).join(' ') -/***/ }), + // At this point, the range is completely trimmed and + // ready to be split into comparators. -/***/ 61017: -/***/ ((module, exports, __nccwpck_require__) => { + var compRe = loose ? 
re[COMPARATORLOOSE] : re[COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) -var Stream = __nccwpck_require__(92413); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream; - exports = module.exports = Stream.Readable; - exports.Readable = Stream.Readable; - exports.Writable = Stream.Writable; - exports.Duplex = Stream.Duplex; - exports.Transform = Stream.Transform; - exports.PassThrough = Stream.PassThrough; - exports.Stream = Stream; -} else { - exports = module.exports = __nccwpck_require__(78680); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = __nccwpck_require__(81008); - exports.Duplex = __nccwpck_require__(26637); - exports.Transform = __nccwpck_require__(6500); - exports.PassThrough = __nccwpck_require__(6671); + return set } +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } -/***/ }), - -/***/ 1206: -/***/ ((module, exports, __nccwpck_require__) => { + return this.set.some(function (thisComparators) { + return thisComparators.every(function (thisComparator) { + return range.set.some(function (rangeComparators) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + }) + }) +} -/* eslint-disable node/no-deprecated-api */ -var buffer = __nccwpck_require__(64293) -var Buffer = buffer.Buffer +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] - } +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp } -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' } -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') } -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) +function replaceTilde (comp, options) { + var r = options.loose ? re[TILDELOOSE] : re[TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') - } - return Buffer(arg, encodingOrOffset, length) + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) } -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[CARETLOOSE] : re[CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' 
+ (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } } else { - buf.fill(fill) + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } } - } else { - buf.fill(0) - } - return buf -} -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return Buffer(size) + debug('caret return', ret) + return ret + }) } -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return buffer.SlowBuffer(size) +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') } +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp -/***/ }), + if (gtlt === '=' && anyX) { + gtlt = '' + } -/***/ 7545: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + ret = gtlt + M + '.' + m + '.' + p + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' 
+ (+m + 1) + '.0' + } + debug('xRange return', ret) -/**/ + return ret + }) +} -var Buffer = __nccwpck_require__(1206).Buffer; -/**/ +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[STAR], '') +} -var isEncoding = Buffer.isEncoding || function (encoding) { - encoding = '' + encoding; - switch (encoding && encoding.toLowerCase()) { - case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': - return true; - default: - return false; +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from } -}; -function _normalizeEncoding(enc) { - if (!enc) return 'utf8'; - var retried; - while (true) { - switch (enc) { - case 'utf8': - case 'utf-8': - return 'utf8'; - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return 'utf16le'; - case 'latin1': - case 'binary': - return 'latin1'; - case 'base64': - case 'ascii': - case 'hex': - return enc; - default: - if (retried) return; // undefined - enc = ('' + enc).toLowerCase(); - retried = true; - } + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to } -}; -// Do not cache `Buffer.isEncoding` when checking encoding names as some -// modules monkey-patch it to support additional encodings -function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); - return nenc || enc; + return (from + ' ' + to).trim() } -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. 
-exports.s = StringDecoder; -function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case 'utf16le': - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case 'utf8': - this.fillLast = utf8FillLast; - nb = 4; - break; - case 'base64': - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer.allocUnsafe(nb); + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false } -StringDecoder.prototype.write = function (buf) { - if (buf.length === 0) return ''; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === undefined) return ''; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ''; -}; -StringDecoder.prototype.end = utf8End; + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } -// Returns only complete characters in a Buffer -StringDecoder.prototype.text = utf8Text; + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } -// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer -StringDecoder.prototype.fillLast = function (buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); + // Version has a -pre, but it's not one of the ones we like. + return false } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; -}; -// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a -// continuation byte. If an invalid byte is detected, -2 is returned. -function utf8CheckByte(byte) { - if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; - return byte >> 6 === 0x02 ? -1 : -2; + return true } -// Checks at most 3 bytes at the end of a Buffer in order to detect an -// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) -// needed to complete the UTF-8 character (if applicable) are returned. 
-function utf8CheckIncomplete(self, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 1; - return nb; +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 2; - return nb; + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } } - return nb; - } - return 0; + }) + return max } -// Validates as many continuation bytes for a multi-byte UTF-8 character as -// needed or are available. If we see a non-continuation byte where we expect -// one, we "replace" the validated continuation bytes we've seen so far with -// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding -// behavior. The continuation byte check is included three times in the case -// where all of the continuation bytes for a character exist in the same buffer. -// It is also done this way as a slight performance increase instead of using a -// loop. -function utf8CheckExtraBytes(self, buf, p) { - if ((buf[0] & 0xC0) !== 0x80) { - self.lastNeed = 0; - return '\ufffd'; +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null } - if (self.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 0xC0) !== 0x80) { - self.lastNeed = 1; - return '\ufffd'; + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } } - if (self.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 0xC0) !== 0x80) { - self.lastNeed = 2; - return '\ufffd'; + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. 
+ var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) } - } + }) } -} -// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. -function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== undefined) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); + if (minver && range.test(minver)) { + return minver } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; + + return null } -// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a -// partial character, the character's bytes are buffered until the required -// number of bytes are available. -function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString('utf8', i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString('utf8', i, end); +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } } -// For UTF-8, a replacement character is added when ending on a partial -// character. -function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + '\ufffd'; - return r; +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) } -// UTF-16LE typically needs two bytes per character, but even if we have an even -// number of bytes available, we need to check if we end on a leading/high -// surrogate. In that case, we need to wait for the next two bytes in order to -// decode the last character properly. -function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString('utf16le', i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 0xD800 && c <= 0xDBFF) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } - } - return r; - } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString('utf16le', i, buf.length - 1); +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) } -// For UTF-16LE we do not explicitly append special replacement characters if we -// end on a partial character, we simply let v8 handle that. -function utf16End(buf) { - var r = buf && buf.length ? 
this.write(buf) : ''; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString('utf16le', 0, end); +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') } - return r; -} -function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString('base64', i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; - } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false } - return buf.toString('base64', i, buf.length - n); -} -function base64End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); - return r; -} + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. -// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) -function simpleWrite(buf) { - return buf.toString(this.encoding); -} + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] -function simpleEnd(buf) { - return buf && buf.length ? this.write(buf) : ''; -} + var high = null + var low = null -/***/ }), + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) -/***/ 34266: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } -"use strict"; + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} -const Duplex = __nccwpck_require__(92413).Duplex; +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +} -const kCallback = Symbol('Callback'); -const kOtherSide = Symbol('Other'); +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} -class DuplexSocket extends Duplex { - constructor(options) { - super(options); - this[kCallback] = null; - this[kOtherSide] = null; +exports.coerce = coerce +function coerce (version) { + if (version instanceof SemVer) { + return version } - _read() { - const callback = this[kCallback]; - if (callback) { - this[kCallback] = null; - callback(); - } + if (typeof version !== 'string') { + return null } - _write(chunk, encoding, callback) { - this[kOtherSide][kCallback] = callback; - this[kOtherSide].push(chunk); - } + var match = version.match(re[COERCE]) - _final(callback) { - this[kOtherSide].on('end', callback); - this[kOtherSide].push(null); + if (match == null) { + return null } -} -class DuplexPair { - constructor(options) { - this.socket1 = new DuplexSocket(options); - this.socket2 = new DuplexSocket(options); - this.socket1[kOtherSide] = this.socket2; - this.socket2[kOtherSide] = this.socket1; - } + return parse(match[1] + + '.' + (match[2] || '0') + + '.' + (match[3] || '0')) } -module.exports = DuplexPair; - /***/ }), -/***/ 83533: +/***/ 64314: /***/ ((module, exports, __nccwpck_require__) => { "use strict"; +var Progress = __nccwpck_require__(11083) +var Gauge = __nccwpck_require__(51800) +var EE = __nccwpck_require__(28614).EventEmitter +var log = exports = module.exports = new EE() +var util = __nccwpck_require__(31669) -/** - * Module exports. - */ +var setBlocking = __nccwpck_require__(79344) +var consoleControl = __nccwpck_require__(73645) -module.exports = exports; +setBlocking(true) +var stream = process.stderr +Object.defineProperty(log, 'stream', { + set: function (newStream) { + stream = newStream + if (this.gauge) this.gauge.setWriteTo(stream, stream) + }, + get: function () { + return stream + } +}) -/** - * Module dependencies. - */ +// by default, decide based on tty-ness. +var colorEnabled +log.useColor = function () { + return colorEnabled != null ? 
colorEnabled : stream.isTTY +} -var fs = __nccwpck_require__(35747); -var path = __nccwpck_require__(85622); -var nopt = __nccwpck_require__(41743); -var log = __nccwpck_require__(64314); -log.disableProgress(); -var napi = __nccwpck_require__(10480); +log.enableColor = function () { + colorEnabled = true + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} +log.disableColor = function () { + colorEnabled = false + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} -var EE = __nccwpck_require__(28614).EventEmitter; -var inherits = __nccwpck_require__(31669).inherits; -var commands = [ - 'clean', - 'install', - 'reinstall', - 'build', - 'rebuild', - 'package', - 'testpackage', - 'publish', - 'unpublish', - 'info', - 'testbinary', - 'reveal', - 'configure' - ]; -var aliases = {}; +// default level +log.level = 'info' -// differentiate node-pre-gyp's logs from npm's -log.heading = 'node-pre-gyp'; +log.gauge = new Gauge(stream, { + enabled: false, // no progress bars unless asked + theme: {hasColor: log.useColor()}, + template: [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', default: ''}, + ':', + {type: 'logline', kerning: 1, default: ''} + ] +}) -exports.find = __nccwpck_require__(92800).find; +log.tracker = new Progress.TrackerGroup() -function Run() { - var self = this; +// we track this separately as we may need to temporarily disable the +// display of the status bar for our own loggy purposes. +log.progressEnabled = log.gauge.isEnabled() - this.commands = {}; +var unicodeEnabled - commands.forEach(function (command) { - self.commands[command] = function (argv, callback) { - log.verbose('command', command, argv); - return require('./' + command)(self, argv, callback); - }; - }); +log.enableUnicode = function () { + unicodeEnabled = true + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) } -inherits(Run, EE); -exports.Run = Run; -var proto = Run.prototype; -/** - * Export the contents of the package.json. - */ +log.disableUnicode = function () { + unicodeEnabled = false + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} -proto.package = __nccwpck_require__(16398); +log.setGaugeThemeset = function (themes) { + this.gauge.setThemeset(themes) +} -/** - * nopt configuration definitions - */ +log.setGaugeTemplate = function (template) { + this.gauge.setTemplate(template) +} -proto.configDefs = { - help: Boolean, // everywhere - arch: String, // 'configure' - debug: Boolean, // 'build' - directory: String, // bin - proxy: String, // 'install' - loglevel: String, // everywhere -}; +log.enableProgress = function () { + if (this.progressEnabled) return + this.progressEnabled = true + this.tracker.on('change', this.showProgress) + if (this._pause) return + this.gauge.enable() +} -/** - * nopt shorthands - */ +log.disableProgress = function () { + if (!this.progressEnabled) return + this.progressEnabled = false + this.tracker.removeListener('change', this.showProgress) + this.gauge.disable() +} -proto.shorthands = { - release: '--no-debug', - C: '--directory', - debug: '--debug', - j: '--jobs', - silent: '--loglevel=silent', - silly: '--loglevel=silly', - verbose: '--loglevel=verbose', -}; +var trackerConstructors = ['newGroup', 'newItem', 'newStream'] -/** - * expose the command aliases for the bin file to use. 
- */ +var mixinLog = function (tracker) { + // mixin the public methods from log into the tracker + // (except: conflicts and one's we handle specially) + Object.keys(log).forEach(function (P) { + if (P[0] === '_') return + if (trackerConstructors.filter(function (C) { return C === P }).length) return + if (tracker[P]) return + if (typeof log[P] !== 'function') return + var func = log[P] + tracker[P] = function () { + return func.apply(log, arguments) + } + }) + // if the new tracker is a group, make sure any subtrackers get + // mixed in too + if (tracker instanceof Progress.TrackerGroup) { + trackerConstructors.forEach(function (C) { + var func = tracker[C] + tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) } + }) + } + return tracker +} -proto.aliases = aliases; +// Add tracker constructors to the top level log object +trackerConstructors.forEach(function (C) { + log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) } +}) -/** - * Parses the given argv array and sets the 'opts', - * 'argv' and 'command' properties. - */ +log.clearProgress = function (cb) { + if (!this.progressEnabled) return cb && process.nextTick(cb) + this.gauge.hide(cb) +} -proto.parseArgv = function parseOpts (argv) { - this.opts = nopt(this.configDefs, this.shorthands, argv); - this.argv = this.opts.argv.remain.slice(); - var commands = this.todo = []; +log.showProgress = function (name, completed) { + if (!this.progressEnabled) return + var values = {} + if (name) values.section = name + var last = log.record[log.record.length - 1] + if (last) { + values.subsection = last.prefix + var disp = log.disp[last.level] || last.level + var logline = this._format(disp, log.style[last.level]) + if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle) + logline += ' ' + last.message.split(/\r?\n/)[0] + values.logline = logline + } + values.completed = completed || this.tracker.completed() + this.gauge.show(values) +}.bind(log) // bind for use in tracker's on-change listener - // create a copy of the argv array with aliases mapped - argv = this.argv.map(function (arg) { - // is this an alias? - if (arg in this.aliases) { - arg = this.aliases[arg]; - } - return arg; - }, this); +// temporarily stop emitting, but don't drop +log.pause = function () { + this._paused = true + if (this.progressEnabled) this.gauge.disable() +} - // process the mapped args into "command" objects ("name" and "args" props) - argv.slice().forEach(function (arg) { - if (arg in this.commands) { - var args = argv.splice(0, argv.indexOf(arg)); - argv.shift(); - if (commands.length > 0) { - commands[commands.length - 1].args = args; - } - commands.push({ name: arg, args: [] }); +log.resume = function () { + if (!this._paused) return + this._paused = false + + var b = this._buffer + this._buffer = [] + b.forEach(function (m) { + this.emitLog(m) + }, this) + if (this.progressEnabled) this.gauge.enable() +} + +log._buffer = [] + +var id = 0 +log.record = [] +log.maxRecordSize = 10000 +log.log = function (lvl, prefix, message) { + var l = this.levels[lvl] + if (l === undefined) { + return this.emit('error', new Error(util.format( + 'Undefined log level: %j', lvl))) + } + + var a = new Array(arguments.length - 2) + var stack = null + for (var i = 2; i < arguments.length; i++) { + var arg = a[i - 2] = arguments[i] + + // resolve stack traces to a plain string. 
+ if (typeof arg === 'object' && arg && + (arg instanceof Error) && arg.stack) { + + Object.defineProperty(arg, 'stack', { + value: stack = arg.stack + '', + enumerable: true, + writable: true + }) } - }, this); - if (commands.length > 0) { - commands[commands.length - 1].args = argv.splice(0); } + if (stack) a.unshift(stack + '\n') + message = util.format.apply(util, a) - // expand commands entries for multiple napi builds - var dir = this.opts.directory; - if (dir == null) dir = process.cwd(); - var package_json = JSON.parse(fs.readFileSync(path.join(dir,'package.json'))); + var m = { id: id++, + level: lvl, + prefix: String(prefix || ''), + message: message, + messageRaw: a } - this.todo = napi.expand_commands (package_json, this.opts, commands); + this.emit('log', m) + this.emit('log.' + lvl, m) + if (m.prefix) this.emit(m.prefix, m) - // support for inheriting config env variables from npm - var npm_config_prefix = 'npm_config_'; - Object.keys(process.env).forEach(function (name) { - if (name.indexOf(npm_config_prefix) !== 0) return; - var val = process.env[name]; - if (name === npm_config_prefix + 'loglevel') { - log.level = val; - } else { - // add the user-defined options to the config - name = name.substring(npm_config_prefix.length); - // avoid npm argv clobber already present args - // which avoids problem of 'npm test' calling - // script that runs unique npm install commands - if (name === 'argv') { - if (this.opts.argv && - this.opts.argv.remain && - this.opts.argv.remain.length) { - // do nothing - } else { - this.opts[name] = val; - } - } else { - this.opts[name] = val; - } + this.record.push(m) + var mrs = this.maxRecordSize + var n = this.record.length - mrs + if (n > mrs / 10) { + var newSize = Math.floor(mrs * 0.9) + this.record = this.record.slice(-1 * newSize) + } + + this.emitLog(m) +}.bind(log) + +log.emitLog = function (m) { + if (this._paused) { + this._buffer.push(m) + return + } + if (this.progressEnabled) this.gauge.pulse(m.prefix) + var l = this.levels[m.level] + if (l === undefined) return + if (l < this.levels[this.level]) return + if (l > 0 && !isFinite(l)) return + + // If 'disp' is null or undefined, use the lvl as a default + // Allows: '', 0 as valid disp + var disp = log.disp[m.level] != null ? log.disp[m.level] : m.level + this.clearProgress() + m.message.split(/\r?\n/).forEach(function (line) { + if (this.heading) { + this.write(this.heading, this.headingStyle) + this.write(' ') } - }, this); + this.write(disp, log.style[m.level]) + var p = m.prefix || '' + if (p) this.write(' ') + this.write(p, this.prefixStyle) + this.write(' ' + line + '\n') + }, this) + this.showProgress() +} - if (this.opts.loglevel) { - log.level = this.opts.loglevel; +log._format = function (msg, style) { + if (!stream) return + + var output = '' + if (this.useColor()) { + style = style || {} + var settings = [] + if (style.fg) settings.push(style.fg) + if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + if (style.bold) settings.push('bold') + if (style.underline) settings.push('underline') + if (style.inverse) settings.push('inverse') + if (settings.length) output += consoleControl.color(settings) + if (style.beep) output += consoleControl.beep() } - log.resume(); -}; + output += msg + if (this.useColor()) { + output += consoleControl.color('reset') + } + return output +} -/** - * Returns the usage instructions for node-pre-gyp. 
- */ +log.write = function (msg, style) { + if (!stream) return -proto.usage = function usage () { - var str = [ - '', - ' Usage: node-pre-gyp [options]', - '', - ' where is one of:', - commands.map(function (c) { - return ' - ' + c + ' - ' + require('./' + c).usage; - }).join('\n'), - '', - 'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'), - 'node@' + process.versions.node - ].join('\n'); - return str; -}; + stream.write(this._format(msg, style)) +} -/** - * Version number getter. - */ +log.addLevel = function (lvl, n, style, disp) { + // If 'disp' is null or undefined, use the lvl as a default + if (disp == null) disp = lvl + this.levels[lvl] = n + this.style[lvl] = style + if (!this[lvl]) { + this[lvl] = function () { + var a = new Array(arguments.length + 1) + a[0] = lvl + for (var i = 0; i < arguments.length; i++) { + a[i + 1] = arguments[i] + } + return this.log.apply(this, a) + }.bind(this) + } + this.disp[lvl] = disp +} -Object.defineProperty(proto, 'version', { - get: function () { - return this.package.version; - }, - enumerable: true -}); +log.prefixStyle = { fg: 'magenta' } +log.headingStyle = { fg: 'white', bg: 'black' } + +log.style = {} +log.levels = {} +log.disp = {} +log.addLevel('silly', -Infinity, { inverse: true }, 'sill') +log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb') +log.addLevel('info', 2000, { fg: 'green' }) +log.addLevel('timing', 2500, { fg: 'green', bg: 'black' }) +log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) +log.addLevel('notice', 3500, { fg: 'blue', bg: 'black' }) +log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') +log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') +log.addLevel('silent', Infinity) +// allow 'error' prefix +log.on('error', function () {}) /***/ }), -/***/ 92800: -/***/ ((module, exports, __nccwpck_require__) => { +/***/ 16325: +/***/ ((module) => { "use strict"; +module.exports = Number.isNaN || function (x) { + return x !== x; +}; -var versioning = __nccwpck_require__(10887); -var napi = __nccwpck_require__(10480); -var existsSync = __nccwpck_require__(35747).existsSync || __nccwpck_require__(85622).existsSync; -var path = __nccwpck_require__(85622); -module.exports = exports; +/***/ }), -exports.usage = 'Finds the require path for the node-pre-gyp installed module'; +/***/ 43248: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -exports.validate = function(package_json,opts) { - versioning.validate_config(package_json,opts); -}; +var crypto = __nccwpck_require__(76417) -exports.find = function(package_json_path,opts) { - if (!existsSync(package_json_path)) { - throw new Error("package.json does not exist at " + package_json_path); - } - var package_json = require(package_json_path); - versioning.validate_config(package_json,opts); - var napi_build_version; - if (napi.get_napi_build_versions (package_json, opts)) { - napi_build_version = napi.get_best_napi_build_version(package_json, opts); - } - opts = opts || {}; - if (!opts.module_root) opts.module_root = path.dirname(package_json_path); - var meta = versioning.evaluate(package_json,opts,napi_build_version); - return meta.module; -}; +function sha (key, body, algorithm) { + return crypto.createHmac(algorithm, key).update(body).digest('base64') +} +function rsa (key, body) { + return crypto.createSign('RSA-SHA1').update(body).sign(key, 'base64') +} -/***/ }), +function rfc3986 (str) { + return encodeURIComponent(str) + .replace(/!/g,'%21') + .replace(/\*/g,'%2A') + .replace(/\(/g,'%28') + 
.replace(/\)/g,'%29') + .replace(/'/g,'%27') +} -/***/ 10480: -/***/ ((module, exports, __nccwpck_require__) => { +// Maps object to bi-dimensional array +// Converts { foo: 'A', bar: [ 'b', 'B' ]} to +// [ ['foo', 'A'], ['bar', 'b'], ['bar', 'B'] ] +function map (obj) { + var key, val, arr = [] + for (key in obj) { + val = obj[key] + if (Array.isArray(val)) + for (var i = 0; i < val.length; i++) + arr.push([key, val[i]]) + else if (typeof val === 'object') + for (var prop in val) + arr.push([key + '[' + prop + ']', val[prop]]) + else + arr.push([key, val]) + } + return arr +} -"use strict"; +// Compare function for sort +function compare (a, b) { + return a > b ? 1 : a < b ? -1 : 0 +} +function generateBase (httpMethod, base_uri, params) { + // adapted from https://dev.twitter.com/docs/auth/oauth and + // https://dev.twitter.com/docs/auth/creating-signature -var fs = __nccwpck_require__(35747); -var rm = __nccwpck_require__(14959); -var log = __nccwpck_require__(64314); + // Parameter normalization + // http://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 + var normalized = map(params) + // 1. First, the name and value of each parameter are encoded + .map(function (p) { + return [ rfc3986(p[0]), rfc3986(p[1] || '') ] + }) + // 2. The parameters are sorted by name, using ascending byte value + // ordering. If two or more parameters share the same name, they + // are sorted by their value. + .sort(function (a, b) { + return compare(a[0], b[0]) || compare(a[1], b[1]) + }) + // 3. The name of each parameter is concatenated to its corresponding + // value using an "=" character (ASCII code 61) as a separator, even + // if the value is empty. + .map(function (p) { return p.join('=') }) + // 4. The sorted name/value pairs are concatenated together into a + // single string by using an "&" character (ASCII code 38) as + // separator. + .join('&') -module.exports = exports; + var base = [ + rfc3986(httpMethod ? httpMethod.toUpperCase() : 'GET'), + rfc3986(base_uri), + rfc3986(normalized) + ].join('&') -var versionArray = process.version - .substr(1) - .replace(/-.*$/, '') - .split('.') - .map(function(item) { - return +item; - }); + return base +} -var napi_multiple_commands = [ - 'build', - 'clean', - 'configure', - 'package', - 'publish', - 'reveal', - 'testbinary', - 'testpackage', - 'unpublish' -]; +function hmacsign (httpMethod, base_uri, params, consumer_secret, token_secret) { + var base = generateBase(httpMethod, base_uri, params) + var key = [ + consumer_secret || '', + token_secret || '' + ].map(rfc3986).join('&') -var napi_build_version_tag = 'napi_build_version='; + return sha(key, base, 'sha1') +} -module.exports.get_napi_version = function(target) { // target may be undefined - // returns the non-zero numeric napi version or undefined if napi is not supported. - // correctly supporting target requires an updated cross-walk - var version = process.versions.napi; // can be undefined - if (!version) { // this code should never need to be updated - if (versionArray[0] === 9 && versionArray[1] >= 3) version = 2; // 9.3.0+ - else if (versionArray[0] === 8) version = 1; // 8.0.0+ - } - return version; -}; +function hmacsign256 (httpMethod, base_uri, params, consumer_secret, token_secret) { + var base = generateBase(httpMethod, base_uri, params) + var key = [ + consumer_secret || '', + token_secret || '' + ].map(rfc3986).join('&') -module.exports.get_napi_version_as_string = function(target) { - // returns the napi version as a string or an empty string if napi is not supported. 
- var version = module.exports.get_napi_version(target); - return version ? ''+version : ''; -}; + return sha(key, base, 'sha256') +} -module.exports.validate_package_json = function(package_json, opts) { // throws Error +function rsasign (httpMethod, base_uri, params, private_key, token_secret) { + var base = generateBase(httpMethod, base_uri, params) + var key = private_key || '' - var binary = package_json.binary; - var module_path_ok = pathOK(binary.module_path); - var remote_path_ok = pathOK(binary.remote_path); - var package_name_ok = pathOK(binary.package_name); - var napi_build_versions = module.exports.get_napi_build_versions(package_json,opts,true); - var napi_build_versions_raw = module.exports.get_napi_build_versions_raw(package_json); + return rsa(key, base) +} - if (napi_build_versions) { - napi_build_versions.forEach(function(napi_build_version){ - if (!(parseInt(napi_build_version,10) === napi_build_version && napi_build_version > 0)) { - throw new Error("All values specified in napi_versions must be positive integers."); - } - }); - } +function plaintext (consumer_secret, token_secret) { + var key = [ + consumer_secret || '', + token_secret || '' + ].map(rfc3986).join('&') - if (napi_build_versions && (!module_path_ok || (!remote_path_ok && !package_name_ok))) { - throw new Error("When napi_versions is specified; module_path and either remote_path or " + - "package_name must contain the substitution string '{napi_build_version}`."); - } + return key +} - if ((module_path_ok || remote_path_ok || package_name_ok) && !napi_build_versions_raw) { - throw new Error("When the substitution string '{napi_build_version}` is specified in " + - "module_path, remote_path, or package_name; napi_versions must also be specified."); - } +function sign (signMethod, httpMethod, base_uri, params, consumer_secret, token_secret) { + var method + var skipArgs = 1 - if (napi_build_versions && !module.exports.get_best_napi_build_version(package_json, opts) && - module.exports.build_napi_only(package_json)) { - throw new Error( - 'The N-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' + - 'This module supports N-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' + - 'This Node instance cannot run this module.'); - } + switch (signMethod) { + case 'RSA-SHA1': + method = rsasign + break + case 'HMAC-SHA1': + method = hmacsign + break + case 'HMAC-SHA256': + method = hmacsign256 + break + case 'PLAINTEXT': + method = plaintext + skipArgs = 4 + break + default: + throw new Error('Signature method not supported: ' + signMethod) + } - if (napi_build_versions_raw && !napi_build_versions && module.exports.build_napi_only(package_json)) { - throw new Error( - 'The N-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' + - 'This module supports N-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. 
' + - 'This Node instance cannot run this module.'); - } + return method.apply(null, [].slice.call(arguments, skipArgs)) +} -}; +exports.hmacsign = hmacsign +exports.hmacsign256 = hmacsign256 +exports.rsasign = rsasign +exports.plaintext = plaintext +exports.sign = sign +exports.rfc3986 = rfc3986 +exports.generateBase = generateBase -function pathOK (path) { - return path && (path.indexOf('{napi_build_version}') !== -1 || path.indexOf('{node_napi_label}') !== -1); +/***/ }), + +/***/ 17426: +/***/ ((module) => { + +"use strict"; +/* +object-assign +(c) Sindre Sorhus +@license MIT +*/ + + +/* eslint-disable no-unused-vars */ +var getOwnPropertySymbols = Object.getOwnPropertySymbols; +var hasOwnProperty = Object.prototype.hasOwnProperty; +var propIsEnumerable = Object.prototype.propertyIsEnumerable; + +function toObject(val) { + if (val === null || val === undefined) { + throw new TypeError('Object.assign cannot be called with null or undefined'); + } + + return Object(val); } -module.exports.expand_commands = function(package_json, opts, commands) { - var expanded_commands = []; - var napi_build_versions = module.exports.get_napi_build_versions(package_json, opts); - commands.forEach(function(command){ - if (napi_build_versions && command.name === 'install') { - var napi_build_version = module.exports.get_best_napi_build_version(package_json, opts); - var args = napi_build_version ? [ napi_build_version_tag+napi_build_version ] : [ ]; - expanded_commands.push ({ name: command.name, args: args }); - } else if (napi_build_versions && napi_multiple_commands.indexOf(command.name) !== -1) { - napi_build_versions.forEach(function(napi_build_version){ - var args = command.args.slice(); - args.push (napi_build_version_tag+napi_build_version); - expanded_commands.push ({ name: command.name, args: args }); - }); - } else { - expanded_commands.push (command); +function shouldUseNative() { + try { + if (!Object.assign) { + return false; } - }); - return expanded_commands; -}; -module.exports.get_napi_build_versions = function(package_json, opts, warnings) { // opts may be undefined - var napi_build_versions = []; - var supported_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined); - // remove duplicates, verify each napi version can actaully be built - if (package_json.binary && package_json.binary.napi_versions) { - package_json.binary.napi_versions.forEach(function(napi_version) { - var duplicated = napi_build_versions.indexOf(napi_version) !== -1; - if (!duplicated && supported_napi_version && napi_version <= supported_napi_version) { - napi_build_versions.push(napi_version); - } else if (warnings && !duplicated && supported_napi_version) { - log.info('This Node instance does not support builds for N-API version', napi_version); - } - }); - } - if (opts && opts["build-latest-napi-version-only"]) { - var latest_version = 0; - napi_build_versions.forEach(function(napi_version) { - if (napi_version > latest_version) latest_version = napi_version; - }); - napi_build_versions = latest_version ? [ latest_version ] : []; - } - return napi_build_versions.length ? napi_build_versions : undefined; -}; + // Detect buggy property enumeration order in older V8 versions. 
-module.exports.get_napi_build_versions_raw = function(package_json) { - var napi_build_versions = []; - // remove duplicates - if (package_json.binary && package_json.binary.napi_versions) { - package_json.binary.napi_versions.forEach(function(napi_version) { - if (napi_build_versions.indexOf(napi_version) === -1) { - napi_build_versions.push(napi_version); - } - }); - } - return napi_build_versions.length ? napi_build_versions : undefined; -}; + // https://bugs.chromium.org/p/v8/issues/detail?id=4118 + var test1 = new String('abc'); // eslint-disable-line no-new-wrappers + test1[5] = 'de'; + if (Object.getOwnPropertyNames(test1)[0] === '5') { + return false; + } -module.exports.get_command_arg = function(napi_build_version) { - return napi_build_version_tag + napi_build_version; -}; + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test2 = {}; + for (var i = 0; i < 10; i++) { + test2['_' + String.fromCharCode(i)] = i; + } + var order2 = Object.getOwnPropertyNames(test2).map(function (n) { + return test2[n]; + }); + if (order2.join('') !== '0123456789') { + return false; + } -module.exports.get_napi_build_version_from_command_args = function(command_args) { - for (var i = 0; i < command_args.length; i++) { - var arg = command_args[i]; - if (arg.indexOf(napi_build_version_tag) === 0) { - return parseInt(arg.substr(napi_build_version_tag.length),10); + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test3 = {}; + 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { + test3[letter] = letter; + }); + if (Object.keys(Object.assign({}, test3)).join('') !== + 'abcdefghijklmnopqrst') { + return false; } + + return true; + } catch (err) { + // We don't expect any of the above to throw, but better to be safe. + return false; } - return undefined; -}; +} -module.exports.swap_build_dir_out = function(napi_build_version) { - if (napi_build_version) { - rm.sync(module.exports.get_build_dir(napi_build_version)); - fs.renameSync('build', module.exports.get_build_dir(napi_build_version)); - } -}; +module.exports = shouldUseNative() ? Object.assign : function (target, source) { + var from; + var to = toObject(target); + var symbols; -module.exports.swap_build_dir_in = function(napi_build_version) { - if (napi_build_version) { - rm.sync('build'); - fs.renameSync(module.exports.get_build_dir(napi_build_version), 'build'); - } -}; + for (var s = 1; s < arguments.length; s++) { + from = Object(arguments[s]); -module.exports.get_build_dir = function(napi_build_version) { - return 'build-tmp-napi-v'+napi_build_version; -}; + for (var key in from) { + if (hasOwnProperty.call(from, key)) { + to[key] = from[key]; + } + } -module.exports.get_best_napi_build_version = function(package_json, opts) { - var best_napi_build_version = 0; - var napi_build_versions = module.exports.get_napi_build_versions (package_json, opts); - if (napi_build_versions) { - var our_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined); - napi_build_versions.forEach(function(napi_build_version){ - if (napi_build_version > best_napi_build_version && - napi_build_version <= our_napi_version) { - best_napi_build_version = napi_build_version; + if (getOwnPropertySymbols) { + symbols = getOwnPropertySymbols(from); + for (var i = 0; i < symbols.length; i++) { + if (propIsEnumerable.call(from, symbols[i])) { + to[symbols[i]] = from[symbols[i]]; + } } - }); + } } - return best_napi_build_version === 0 ? 
undefined : best_napi_build_version; -}; -module.exports.build_napi_only = function(package_json) { - return package_json.binary && package_json.binary.package_name && - package_json.binary.package_name.indexOf('{node_napi_label}') === -1; + return to; }; -/***/ }), - -/***/ 10887: -/***/ ((module, exports, __nccwpck_require__) => { - -"use strict"; - -module.exports = exports; - -var path = __nccwpck_require__(85622); -var semver = __nccwpck_require__(2964); -var url = __nccwpck_require__(78835); -var detect_libc = __nccwpck_require__(34889); -var napi = __nccwpck_require__(10480); +/***/ }), -var abi_crosswalk; +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// This is used for unit testing to provide a fake -// ABI crosswalk that emulates one that is not updated -// for the current version -if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) { - abi_crosswalk = require(process.env.NODE_PRE_GYP_ABI_CROSSWALK); -} else { - abi_crosswalk = __nccwpck_require__(60282); -} +var wrappy = __nccwpck_require__(62940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) -var major_versions = {}; -Object.keys(abi_crosswalk).forEach(function(v) { - var major = v.split('.')[0]; - if (!major_versions[major]) { - major_versions[major] = v; - } -}); +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) -function get_electron_abi(runtime, target_version) { - if (!runtime) { - throw new Error("get_electron_abi requires valid runtime arg"); - } - if (typeof target_version === 'undefined') { - // erroneous CLI call - throw new Error("Empty target version is not supported if electron is the target."); - } - // Electron guarantees that patch version update won't break native modules. - var sem_ver = semver.parse(target_version); - return runtime + '-v' + sem_ver.major + '.' + sem_ver.minor; -} -module.exports.get_electron_abi = get_electron_abi; + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) -function get_node_webkit_abi(runtime, target_version) { - if (!runtime) { - throw new Error("get_node_webkit_abi requires valid runtime arg"); - } - if (typeof target_version === 'undefined') { - // erroneous CLI call - throw new Error("Empty target version is not supported if node-webkit is the target."); - } - return runtime + '-v' + target_version; +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f } -module.exports.get_node_webkit_abi = get_node_webkit_abi; -function get_node_abi(runtime, versions) { - if (!runtime) { - throw new Error("get_node_abi requires valid runtime arg"); - } - if (!versions) { - throw new Error("get_node_abi requires valid process.versions object"); - } - var sem_ver = semver.parse(versions.node); - if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series - // https://github.com/mapbox/node-pre-gyp/issues/124 - return runtime+'-v'+versions.node; - } else { - // process.versions.modules added in >= v0.10.4 and v0.11.7 - // https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e - return versions.modules ? 
runtime+'-v' + (+versions.modules) : - 'v8-' + versions.v8.split('.').slice(0,2).join('.'); - } +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f } -module.exports.get_node_abi = get_node_abi; - -function get_runtime_abi(runtime, target_version) { - if (!runtime) { - throw new Error("get_runtime_abi requires valid runtime arg"); - } - if (runtime === 'node-webkit') { - return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']); - } else if (runtime === 'electron') { - return get_electron_abi(runtime, target_version || process.versions.electron); - } else { - if (runtime != 'node') { - throw new Error("Unknown Runtime: '" + runtime + "'"); - } - if (!target_version) { - return get_node_abi(runtime,process.versions); - } else { - var cross_obj; - // abi_crosswalk generated with ./scripts/abi_crosswalk.js - if (abi_crosswalk[target_version]) { - cross_obj = abi_crosswalk[target_version]; - } else { - var target_parts = target_version.split('.').map(function(i) { return +i; }); - if (target_parts.length != 3) { // parse failed - throw new Error("Unknown target version: " + target_version); - } - /* - The below code tries to infer the last known ABI compatible version - that we have recorded in the abi_crosswalk.json when an exact match - is not possible. The reasons for this to exist are complicated: - - We support passing --target to be able to allow developers to package binaries for versions of node - that are not the same one as they are running. This might also be used in combination with the - --target_arch or --target_platform flags to also package binaries for alternative platforms - - When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node - version that is running in memory - - So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look - this info up for all versions - - But we cannot easily predict what the future ABI will be for released versions - - And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly - by being fully available at install time. - - So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release - need to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if - you want the `--target` flag to keep working for the latest version - - Which is impractical ^^ - - Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding. - In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that - only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be - ABI compatible with v0.10.33). +/***/ }), - TODO: use semver module instead of custom version parsing - */ - var major = target_parts[0]; - var minor = target_parts[1]; - var patch = target_parts[2]; - // io.js: yeah if node.js ever releases 1.x this will break - // but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616 - if (major === 1) { - // look for last release that is the same major version - // e.g. 
we assume io.js 1.x is ABI compatible with >= 1.0.0 - while (true) { - if (minor > 0) --minor; - if (patch > 0) --patch; - var new_iojs_target = '' + major + '.' + minor + '.' + patch; - if (abi_crosswalk[new_iojs_target]) { - cross_obj = abi_crosswalk[new_iojs_target]; - console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); - console.log('Warning: but node-pre-gyp successfully choose ' + new_iojs_target + ' as ABI compatible target'); - break; - } - if (minor === 0 && patch === 0) { - break; - } - } - } else if (major >= 2) { - // look for last release that is the same major version - if (major_versions[major]) { - cross_obj = abi_crosswalk[major_versions[major]]; - console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); - console.log('Warning: but node-pre-gyp successfully choose ' + major_versions[major] + ' as ABI compatible target'); - } - } else if (major === 0) { // node.js - if (target_parts[1] % 2 === 0) { // for stable/even node.js series - // look for the last release that is the same minor release - // e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0 - while (--patch > 0) { - var new_node_target = '' + major + '.' + minor + '.' + patch; - if (abi_crosswalk[new_node_target]) { - cross_obj = abi_crosswalk[new_node_target]; - console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); - console.log('Warning: but node-pre-gyp successfully choose ' + new_node_target + ' as ABI compatible target'); - break; - } - } - } - } - } - if (!cross_obj) { - throw new Error("Unsupported target version: " + target_version); - } - // emulate process.versions - var versions_obj = { - node: target_version, - v8: cross_obj.v8+'.0', - // abi_crosswalk uses 1 for node versions lacking process.versions.modules - // process.versions.modules added in >= v0.10.4 and v0.11.7 - modules: cross_obj.node_abi > 1 ? cross_obj.node_abi : undefined - }; - return get_node_abi(runtime, versions_obj); - } - } -} -module.exports.get_runtime_abi = get_runtime_abi; +/***/ 43406: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var required_parameters = [ - 'module_name', - 'module_path', - 'host' -]; +"use strict"; -function validate_config(package_json,opts) { - var msg = package_json.name + ' package.json is not node-pre-gyp ready:\n'; - var missing = []; - if (!package_json.main) { - missing.push('main'); - } - if (!package_json.version) { - missing.push('version'); - } - if (!package_json.name) { - missing.push('name'); - } - if (!package_json.binary) { - missing.push('binary'); - } - var o = package_json.binary; - required_parameters.forEach(function(p) { - if (missing.indexOf('binary') > -1) { - missing.pop('binary'); - } - if (!o || o[p] === undefined || o[p] === "") { - missing.push('binary.' 
+ p); - } - }); - if (missing.length >= 1) { - throw new Error(msg+"package.json must declare these properties: \n" + missing.join('\n')); - } - if (o) { - // enforce https over http - var protocol = url.parse(o.host).protocol; - if (protocol === 'http:') { - throw new Error("'host' protocol ("+protocol+") is invalid - only 'https:' is accepted"); - } - } - napi.validate_package_json(package_json,opts); -} +var os = __nccwpck_require__(12087); -module.exports.validate_config = validate_config; +function homedir() { + var env = process.env; + var home = env.HOME; + var user = env.LOGNAME || env.USER || env.LNAME || env.USERNAME; -function eval_template(template,opts) { - Object.keys(opts).forEach(function(key) { - var pattern = '{'+key+'}'; - while (template.indexOf(pattern) > -1) { - template = template.replace(pattern,opts[key]); - } - }); - return template; -} + if (process.platform === 'win32') { + return env.USERPROFILE || env.HOMEDRIVE + env.HOMEPATH || home || null; + } -// url.resolve needs single trailing slash -// to behave correctly, otherwise a double slash -// may end up in the url which breaks requests -// and a lacking slash may not lead to proper joining -function fix_slashes(pathname) { - if (pathname.slice(-1) != '/') { - return pathname + '/'; - } - return pathname; -} + if (process.platform === 'darwin') { + return home || (user ? '/Users/' + user : null); + } -// remove double slashes -// note: path.normalize will not work because -// it will convert forward to back slashes -function drop_double_slashes(pathname) { - return pathname.replace(/\/\//g,'/'); -} + if (process.platform === 'linux') { + return home || (process.getuid() === 0 ? '/root' : (user ? '/home/' + user : null)); + } -function get_process_runtime(versions) { - var runtime = 'node'; - if (versions['node-webkit']) { - runtime = 'node-webkit'; - } else if (versions.electron) { - runtime = 'electron'; - } - return runtime; + return home || null; } -module.exports.get_process_runtime = get_process_runtime; - -var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz'; -var default_remote_path = ''; - -module.exports.evaluate = function(package_json,options,napi_build_version) { - options = options || {}; - validate_config(package_json,options); // options is a suitable substitute for opts in this case - var v = package_json.version; - var module_version = semver.parse(v); - var runtime = options.runtime || get_process_runtime(process.versions); - var opts = { - name: package_json.name, - configuration: Boolean(options.debug) ? 'Debug' : 'Release', - debug: options.debug, - module_name: package_json.binary.module_name, - version: module_version.version, - prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '', - build: module_version.build.length ? module_version.build.join('.') : '', - major: module_version.major, - minor: module_version.minor, - patch: module_version.patch, - runtime: runtime, - node_abi: get_runtime_abi(runtime,options.target), - node_abi_napi: napi.get_napi_version(options.target) ? 'napi' : get_runtime_abi(runtime,options.target), - napi_version: napi.get_napi_version(options.target), // non-zero numeric, undefined if unsupported - napi_build_version: napi_build_version || '', - node_napi_label: napi_build_version ? 
'napi-v' + napi_build_version : get_runtime_abi(runtime,options.target), - target: options.target || '', - platform: options.target_platform || process.platform, - target_platform: options.target_platform || process.platform, - arch: options.target_arch || process.arch, - target_arch: options.target_arch || process.arch, - libc: options.target_libc || detect_libc.family || 'unknown', - module_main: package_json.main, - toolset : options.toolset || '' // address https://github.com/mapbox/node-pre-gyp/issues/119 - }; - // support host mirror with npm config `--{module_name}_binary_host_mirror` - // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25 - // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ - var host = process.env['npm_config_' + opts.module_name + '_binary_host_mirror'] || package_json.binary.host; - opts.host = fix_slashes(eval_template(host,opts)); - opts.module_path = eval_template(package_json.binary.module_path,opts); - // now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably - if (options.module_root) { - // resolve relative to known module root: works for pre-binding require - opts.module_path = path.join(options.module_root,opts.module_path); - } else { - // resolve relative to current working directory: works for node-pre-gyp commands - opts.module_path = path.resolve(opts.module_path); - } - opts.module = path.join(opts.module_path,opts.module_name + '.node'); - opts.remote_path = package_json.binary.remote_path ? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path,opts))) : default_remote_path; - var package_name = package_json.binary.package_name ? package_json.binary.package_name : default_package_name; - opts.package_name = eval_template(package_name,opts); - opts.staged_tarball = path.join('build/stage',opts.remote_path,opts.package_name); - opts.hosted_path = url.resolve(opts.host,opts.remote_path); - opts.hosted_tarball = url.resolve(opts.hosted_path,opts.package_name); - return opts; -}; +module.exports = typeof os.homedir === 'function' ? os.homedir : homedir; /***/ }), -/***/ 41743: -/***/ ((module, exports, __nccwpck_require__) => { +/***/ 71284: +/***/ ((module) => { -// info about each config option. +"use strict"; -var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG - ? function () { console.error.apply(console, arguments) } - : function () {} +var isWindows = process.platform === 'win32'; +var trailingSlashRe = isWindows ? 
/[^:]\\$/ : /.\/$/; -var url = __nccwpck_require__(78835) - , path = __nccwpck_require__(85622) - , Stream = __nccwpck_require__(92413).Stream - , abbrev = __nccwpck_require__(48566) - , osenv = __nccwpck_require__(84669) +// https://github.com/nodejs/node/blob/3e7a14381497a3b73dda68d05b5130563cdab420/lib/os.js#L25-L43 +module.exports = function () { + var path; -module.exports = exports = nopt -exports.clean = clean + if (isWindows) { + path = process.env.TEMP || + process.env.TMP || + (process.env.SystemRoot || process.env.windir) + '\\temp'; + } else { + path = process.env.TMPDIR || + process.env.TMP || + process.env.TEMP || + '/tmp'; + } -exports.typeDefs = - { String : { type: String, validate: validateString } - , Boolean : { type: Boolean, validate: validateBoolean } - , url : { type: url, validate: validateUrl } - , Number : { type: Number, validate: validateNumber } - , path : { type: path, validate: validatePath } - , Stream : { type: Stream, validate: validateStream } - , Date : { type: Date, validate: validateDate } - } + if (trailingSlashRe.test(path)) { + path = path.slice(0, -1); + } -function nopt (types, shorthands, args, slice) { - args = args || process.argv - types = types || {} - shorthands = shorthands || {} - if (typeof slice !== "number") slice = 2 + return path; +}; - debug(types, shorthands, args, slice) - args = args.slice(slice) - var data = {} - , key - , argv = { - remain: [], - cooked: args, - original: args.slice(0) - } +/***/ }), - parse(args, data, argv.remain, types, shorthands) - // now data is full - clean(data, types, exports.typeDefs) - data.argv = argv - Object.defineProperty(data.argv, 'toString', { value: function () { - return this.original.map(JSON.stringify).join(" ") - }, enumerable: false }) - return data -} +/***/ 84669: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function clean (data, types, typeDefs) { - typeDefs = typeDefs || exports.typeDefs - var remove = {} - , typeDefault = [false, true, null, String, Array] +var isWindows = process.platform === 'win32' +var path = __nccwpck_require__(85622) +var exec = __nccwpck_require__(63129).exec +var osTmpdir = __nccwpck_require__(71284) +var osHomedir = __nccwpck_require__(43406) - Object.keys(data).forEach(function (k) { - if (k === "argv") return - var val = data[k] - , isArray = Array.isArray(val) - , type = types[k] - if (!isArray) val = [val] - if (!type) type = typeDefault - if (type === Array) type = typeDefault.concat(Array) - if (!Array.isArray(type)) type = [type] +// looking up envs is a bit costly. +// Also, sometimes we want to have a fallback +// Pass in a callback to wait for the fallback on failures +// After the first lookup, always returns the same thing. 
+function memo (key, lookup, fallback) { + var fell = false + var falling = false + exports[key] = function (cb) { + var val = lookup() + if (!val && !fell && !falling && fallback) { + fell = true + falling = true + exec(fallback, function (er, output, stderr) { + falling = false + if (er) return // oh well, we tried + val = output.trim() + }) + } + exports[key] = function (cb) { + if (cb) process.nextTick(cb.bind(null, null, val)) + return val + } + if (cb && !falling) process.nextTick(cb.bind(null, null, val)) + return val + } +} - debug("val=%j", val) - debug("types=", type) - val = val.map(function (val) { - // if it's an unknown value, then parse false/true/null/numbers/dates - if (typeof val === "string") { - debug("string %j", val) - val = val.trim() - if ((val === "null" && ~type.indexOf(null)) - || (val === "true" && - (~type.indexOf(true) || ~type.indexOf(Boolean))) - || (val === "false" && - (~type.indexOf(false) || ~type.indexOf(Boolean)))) { - val = JSON.parse(val) - debug("jsonable %j", val) - } else if (~type.indexOf(Number) && !isNaN(val)) { - debug("convert to number", val) - val = +val - } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { - debug("convert to date", val) - val = new Date(val) - } - } +memo('user', function () { + return ( isWindows + ? process.env.USERDOMAIN + '\\' + process.env.USERNAME + : process.env.USER + ) +}, 'whoami') - if (!types.hasOwnProperty(k)) { - return val - } +memo('prompt', function () { + return isWindows ? process.env.PROMPT : process.env.PS1 +}) - // allow `--no-blah` to set 'blah' to null if null is allowed - if (val === false && ~type.indexOf(null) && - !(~type.indexOf(false) || ~type.indexOf(Boolean))) { - val = null - } +memo('hostname', function () { + return isWindows ? process.env.COMPUTERNAME : process.env.HOSTNAME +}, 'hostname') - var d = {} - d[k] = val - debug("prevalidated val", d, val, types[k]) - if (!validate(d, k, val, types[k], typeDefs)) { - if (exports.invalidHandler) { - exports.invalidHandler(k, val, types[k], data) - } else if (exports.invalidHandler !== false) { - debug("invalid: "+k+"="+val, types[k]) - } - return remove - } - debug("validated val", d, val, types[k]) - return d[k] - }).filter(function (val) { return val !== remove }) +memo('tmpdir', function () { + return osTmpdir() +}) - // if we allow Array specifically, then an empty array is how we - // express 'no value here', not null. Allow it. - if (!val.length && type.indexOf(Array) === -1) { - debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(Array)) - delete data[k] - } - else if (isArray) { - debug(isArray, data[k], val) - data[k] = val - } else data[k] = val[0] +memo('home', function () { + return osHomedir() +}) - debug("k=%s val=%j", k, val, data[k]) - }) -} +memo('path', function () { + return (process.env.PATH || + process.env.Path || + process.env.path).split(isWindows ? ';' : ':') +}) -function validateString (data, k, val) { - data[k] = String(val) -} +memo('editor', function () { + return process.env.EDITOR || + process.env.VISUAL || + (isWindows ? 'notepad.exe' : 'vi') +}) -function validatePath (data, k, val) { - if (val === true) return false - if (val === null) return true +memo('shell', function () { + return isWindows ? process.env.ComSpec || 'cmd' + : process.env.SHELL || 'bash' +}) - val = String(val) - var isWin = process.platform === 'win32' - , homePattern = isWin ? 
/^~(\/|\\)/ : /^~\// - , home = osenv.home() +/***/ }), - if (home && val.match(homePattern)) { - data[k] = path.resolve(home, val.substr(2)) - } else { - data[k] = path.resolve(val) - } - return true -} +/***/ 9759: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function validateNumber (data, k, val) { - debug("validate Number %j %j %j", k, val, isNaN(val)) - if (isNaN(val)) return false - data[k] = +val -} +"use strict"; -function validateDate (data, k, val) { - var s = Date.parse(val) - debug("validate Date %j %j %j", k, val, s) - if (isNaN(s)) return false - data[k] = new Date(val) -} -function validateBoolean (data, k, val) { - if (val instanceof Boolean) val = val.valueOf() - else if (typeof val === "string") { - if (!isNaN(val)) val = !!(+val) - else if (val === "null" || val === "false") val = false - else val = true - } else val = !!val - data[k] = val -} +const doctype = __nccwpck_require__(27079); +const { DOCUMENT_MODE } = __nccwpck_require__(69338); -function validateUrl (data, k, val) { - val = url.parse(String(val)) - if (!val.host) return false - data[k] = val.href -} +//Conversion tables for DOM Level1 structure emulation +const nodeTypes = { + element: 1, + text: 3, + cdata: 4, + comment: 8 +}; -function validateStream (data, k, val) { - if (!(val instanceof Stream)) return false - data[k] = val -} +const nodePropertyShorthands = { + tagName: 'name', + childNodes: 'children', + parentNode: 'parent', + previousSibling: 'prev', + nextSibling: 'next', + nodeValue: 'data' +}; -function validate (data, k, val, type, typeDefs) { - // arrays are lists of types. - if (Array.isArray(type)) { - for (var i = 0, l = type.length; i < l; i ++) { - if (type[i] === Array) continue - if (validate(data, k, val, type[i], typeDefs)) return true +//Node +class Node { + constructor(props) { + for (const key of Object.keys(props)) { + this[key] = props[key]; + } } - delete data[k] - return false - } - // an array of anything? - if (type === Array) return true + get firstChild() { + const children = this.children; - // NaN is poisonous. Means that something is not allowed. - if (type !== type) { - debug("Poison NaN", k, val, type) - delete data[k] - return false - } + return (children && children[0]) || null; + } - // explicit list of values - if (val === type) { - debug("Explicitly allowed %j", val) - // if (isArray) (data[k] = data[k] || []).push(val) - // else data[k] = val - data[k] = val - return true - } + get lastChild() { + const children = this.children; - // now go through the list of typeDefs, validate against each one. - var ok = false - , types = Object.keys(typeDefs) - for (var i = 0, l = types.length; i < l; i ++) { - debug("test type %j %j %j", k, val, types[i]) - var t = typeDefs[types[i]] - if (t && - ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) { - var d = {} - ok = false !== t.validate(d, k, val) - val = d[k] - if (ok) { - // if (isArray) (data[k] = data[k] || []).push(val) - // else data[k] = val - data[k] = val - break - } + return (children && children[children.length - 1]) || null; } - } - debug("OK? 
%j (%j %j %j)", ok, k, val, types[i]) - if (!ok) delete data[k] - return ok + get nodeType() { + return nodeTypes[this.type] || nodeTypes.element; + } } -function parse (args, data, remain, types, shorthands) { - debug("parse", args, data, remain) - - var key = null - , abbrevs = abbrev(Object.keys(types)) - , shortAbbr = abbrev(Object.keys(shorthands)) - - for (var i = 0; i < args.length; i ++) { - var arg = args[i] - debug("arg", arg) - - if (arg.match(/^-{2,}$/)) { - // done with keys. - // the rest are args. - remain.push.apply(remain, args.slice(i + 1)) - args[i] = "--" - break - } - var hadEq = false - if (arg.charAt(0) === "-" && arg.length > 1) { - var at = arg.indexOf('=') - if (at > -1) { - hadEq = true - var v = arg.substr(at + 1) - arg = arg.substr(0, at) - args.splice(i, 1, arg, v) - } +Object.keys(nodePropertyShorthands).forEach(key => { + const shorthand = nodePropertyShorthands[key]; - // see if it's a shorthand - // if so, splice and back up to re-parse it. - var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) - debug("arg=%j shRes=%j", arg, shRes) - if (shRes) { - debug(arg, shRes) - args.splice.apply(args, [i, 1].concat(shRes)) - if (arg !== shRes[0]) { - i -- - continue + Object.defineProperty(Node.prototype, key, { + get: function() { + return this[shorthand] || null; + }, + set: function(val) { + this[shorthand] = val; + return val; } - } - arg = arg.replace(/^-+/, "") - var no = null - while (arg.toLowerCase().indexOf("no-") === 0) { - no = !no - arg = arg.substr(3) - } - - if (abbrevs[arg]) arg = abbrevs[arg] + }); +}); - var argType = types[arg] - var isTypeArray = Array.isArray(argType) - if (isTypeArray && argType.length === 1) { - isTypeArray = false - argType = argType[0] - } +//Node construction +exports.createDocument = function() { + return new Node({ + type: 'root', + name: 'root', + parent: null, + prev: null, + next: null, + children: [], + 'x-mode': DOCUMENT_MODE.NO_QUIRKS + }); +}; - var isArray = argType === Array || - isTypeArray && argType.indexOf(Array) !== -1 +exports.createDocumentFragment = function() { + return new Node({ + type: 'root', + name: 'root', + parent: null, + prev: null, + next: null, + children: [] + }); +}; - // allow unknown things to be arrays if specified multiple times. - if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { - if (!Array.isArray(data[arg])) - data[arg] = [data[arg]] - isArray = true - } +exports.createElement = function(tagName, namespaceURI, attrs) { + const attribs = Object.create(null); + const attribsNamespace = Object.create(null); + const attribsPrefix = Object.create(null); - var val - , la = args[i + 1] + for (let i = 0; i < attrs.length; i++) { + const attrName = attrs[i].name; - var isBool = typeof no === 'boolean' || - argType === Boolean || - isTypeArray && argType.indexOf(Boolean) !== -1 || - (typeof argType === 'undefined' && !hadEq) || - (la === "false" && - (argType === null || - isTypeArray && ~argType.indexOf(null))) + attribs[attrName] = attrs[i].value; + attribsNamespace[attrName] = attrs[i].namespace; + attribsPrefix[attrName] = attrs[i].prefix; + } - if (isBool) { - // just set and move along - val = !no - // however, also support --bool true or --bool false - if (la === "true" || la === "false") { - val = JSON.parse(la) - la = null - if (no) val = !val - i ++ - } + return new Node({ + type: tagName === 'script' || tagName === 'style' ? 
tagName : 'tag', + name: tagName, + namespace: namespaceURI, + attribs: attribs, + 'x-attribsNamespace': attribsNamespace, + 'x-attribsPrefix': attribsPrefix, + children: [], + parent: null, + prev: null, + next: null + }); +}; - // also support "foo":[Boolean, "bar"] and "--foo bar" - if (isTypeArray && la) { - if (~argType.indexOf(la)) { - // an explicit type - val = la - i ++ - } else if ( la === "null" && ~argType.indexOf(null) ) { - // null allowed - val = null - i ++ - } else if ( !la.match(/^-{2,}[^-]/) && - !isNaN(la) && - ~argType.indexOf(Number) ) { - // number - val = +la - i ++ - } else if ( !la.match(/^-[^-]/) && ~argType.indexOf(String) ) { - // string - val = la - i ++ - } - } +exports.createCommentNode = function(data) { + return new Node({ + type: 'comment', + data: data, + parent: null, + prev: null, + next: null + }); +}; - if (isArray) (data[arg] = data[arg] || []).push(val) - else data[arg] = val +const createTextNode = function(value) { + return new Node({ + type: 'text', + data: value, + parent: null, + prev: null, + next: null + }); +}; - continue - } +//Tree mutation +const appendChild = (exports.appendChild = function(parentNode, newNode) { + const prev = parentNode.children[parentNode.children.length - 1]; - if (argType === String) { - if (la === undefined) { - la = "" - } else if (la.match(/^-{1,2}[^-]+/)) { - la = "" - i -- - } - } + if (prev) { + prev.next = newNode; + newNode.prev = prev; + } - if (la && la.match(/^-{2,}$/)) { - la = undefined - i -- - } + parentNode.children.push(newNode); + newNode.parent = parentNode; +}); - val = la === undefined ? true : la - if (isArray) (data[arg] = data[arg] || []).push(val) - else data[arg] = val +const insertBefore = (exports.insertBefore = function(parentNode, newNode, referenceNode) { + const insertionIdx = parentNode.children.indexOf(referenceNode); + const prev = referenceNode.prev; - i ++ - continue + if (prev) { + prev.next = newNode; + newNode.prev = prev; } - remain.push(arg) - } -} -function resolveShort (arg, shorthands, shortAbbr, abbrevs) { - // handle single-char shorthands glommed together, like - // npm ls -glp, but only if there is one dash, and only if - // all of the chars are single-char shorthands, and it's - // not a match to some other abbrev. 
- arg = arg.replace(/^-+/, '') + referenceNode.prev = newNode; + newNode.next = referenceNode; - // if it's an exact known option, then don't go any further - if (abbrevs[arg] === arg) - return null + parentNode.children.splice(insertionIdx, 0, newNode); + newNode.parent = parentNode; +}); - // if it's an exact known shortopt, same deal - if (shorthands[arg]) { - // make it an array, if it's a list of words - if (shorthands[arg] && !Array.isArray(shorthands[arg])) - shorthands[arg] = shorthands[arg].split(/\s+/) +exports.setTemplateContent = function(templateElement, contentElement) { + appendChild(templateElement, contentElement); +}; - return shorthands[arg] - } +exports.getTemplateContent = function(templateElement) { + return templateElement.children[0]; +}; - // first check to see if this arg is a set of single-char shorthands - var singles = shorthands.___singles - if (!singles) { - singles = Object.keys(shorthands).filter(function (s) { - return s.length === 1 - }).reduce(function (l,r) { - l[r] = true - return l - }, {}) - shorthands.___singles = singles - debug('shorthand singles', singles) - } +exports.setDocumentType = function(document, name, publicId, systemId) { + const data = doctype.serializeContent(name, publicId, systemId); + let doctypeNode = null; + + for (let i = 0; i < document.children.length; i++) { + if (document.children[i].type === 'directive' && document.children[i].name === '!doctype') { + doctypeNode = document.children[i]; + break; + } + } - var chrs = arg.split("").filter(function (c) { - return singles[c] - }) + if (doctypeNode) { + doctypeNode.data = data; + doctypeNode['x-name'] = name; + doctypeNode['x-publicId'] = publicId; + doctypeNode['x-systemId'] = systemId; + } else { + appendChild( + document, + new Node({ + type: 'directive', + name: '!doctype', + data: data, + 'x-name': name, + 'x-publicId': publicId, + 'x-systemId': systemId + }) + ); + } +}; - if (chrs.join("") === arg) return chrs.map(function (c) { - return shorthands[c] - }).reduce(function (l, r) { - return l.concat(r) - }, []) +exports.setDocumentMode = function(document, mode) { + document['x-mode'] = mode; +}; +exports.getDocumentMode = function(document) { + return document['x-mode']; +}; - // if it's an arg abbrev, and not a literal shorthand, then prefer the arg - if (abbrevs[arg] && !shorthands[arg]) - return null +exports.detachNode = function(node) { + if (node.parent) { + const idx = node.parent.children.indexOf(node); + const prev = node.prev; + const next = node.next; - // if it's an abbr for a shorthand, then use that - if (shortAbbr[arg]) - arg = shortAbbr[arg] + node.prev = null; + node.next = null; - // make it an array, if it's a list of words - if (shorthands[arg] && !Array.isArray(shorthands[arg])) - shorthands[arg] = shorthands[arg].split(/\s+/) + if (prev) { + prev.next = next; + } - return shorthands[arg] -} + if (next) { + next.prev = prev; + } + node.parent.children.splice(idx, 1); + node.parent = null; + } +}; -/***/ }), +exports.insertText = function(parentNode, text) { + const lastChild = parentNode.children[parentNode.children.length - 1]; -/***/ 2964: -/***/ ((module, exports) => { + if (lastChild && lastChild.type === 'text') { + lastChild.data += text; + } else { + appendChild(parentNode, createTextNode(text)); + } +}; -exports = module.exports = SemVer +exports.insertTextBefore = function(parentNode, text, referenceNode) { + const prevNode = parentNode.children[parentNode.children.indexOf(referenceNode) - 1]; -var debug -/* istanbul ignore next */ -if 
(typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} -} + if (prevNode && prevNode.type === 'text') { + prevNode.data += text; + } else { + insertBefore(parentNode, createTextNode(text), referenceNode); + } +}; -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' +exports.adoptAttributes = function(recipient, attrs) { + for (let i = 0; i < attrs.length; i++) { + const attrName = attrs[i].name; -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 + if (typeof recipient.attribs[attrName] === 'undefined') { + recipient.attribs[attrName] = attrs[i].value; + recipient['x-attribsNamespace'][attrName] = attrs[i].namespace; + recipient['x-attribsPrefix'][attrName] = attrs[i].prefix; + } + } +}; -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 +//Tree traversing +exports.getFirstChild = function(node) { + return node.children[0]; +}; -// The actual regexps go on exports.re -var re = exports.re = [] -var src = exports.src = [] -var R = 0 +exports.getChildNodes = function(node) { + return node.children; +}; -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. +exports.getParentNode = function(node) { + return node.parent; +}; -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. +exports.getAttrList = function(element) { + const attrList = []; -var NUMERICIDENTIFIER = R++ -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' -var NUMERICIDENTIFIERLOOSE = R++ -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' + for (const name in element.attribs) { + attrList.push({ + name: name, + value: element.attribs[name], + namespace: element['x-attribsNamespace'][name], + prefix: element['x-attribsPrefix'][name] + }); + } -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. + return attrList; +}; -var NONNUMERICIDENTIFIER = R++ -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' +//Node data +exports.getTagName = function(element) { + return element.name; +}; -// ## Main Version -// Three dot-separated numeric identifiers. +exports.getNamespaceURI = function(element) { + return element.namespace; +}; -var MAINVERSION = R++ -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')' +exports.getTextNodeContent = function(textNode) { + return textNode.data; +}; -var MAINVERSIONLOOSE = R++ -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')' +exports.getCommentNodeContent = function(commentNode) { + return commentNode.data; +}; -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. 
+exports.getDocumentTypeNodeName = function(doctypeNode) { + return doctypeNode['x-name']; +}; -var PRERELEASEIDENTIFIER = R++ -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')' +exports.getDocumentTypeNodePublicId = function(doctypeNode) { + return doctypeNode['x-publicId']; +}; -var PRERELEASEIDENTIFIERLOOSE = R++ -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')' +exports.getDocumentTypeNodeSystemId = function(doctypeNode) { + return doctypeNode['x-systemId']; +}; -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. +//Node types +exports.isTextNode = function(node) { + return node.type === 'text'; +}; -var PRERELEASE = R++ -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' +exports.isCommentNode = function(node) { + return node.type === 'comment'; +}; -var PRERELEASELOOSE = R++ -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' +exports.isDocumentTypeNode = function(node) { + return node.type === 'directive' && node.name === '!doctype'; +}; -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. +exports.isElementNode = function(node) { + return !!node.attribs; +}; -var BUILDIDENTIFIER = R++ -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' +// Source code location +exports.setNodeSourceCodeLocation = function(node, location) { + node.sourceCodeLocation = location; +}; -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. +exports.getNodeSourceCodeLocation = function(node) { + return node.sourceCodeLocation; +}; -var BUILD = R++ -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' +exports.updateNodeSourceCodeLocation = function(node, endLocation) { + node.sourceCodeLocation = Object.assign(node.sourceCodeLocation, endLocation); +}; -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. +/***/ }), -var FULL = R++ -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?' +/***/ 27079: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -src[FULL] = '^' + FULLPLAIN + '$' +"use strict"; -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?' -var LOOSE = R++ -src[LOOSE] = '^' + LOOSEPLAIN + '$' +const { DOCUMENT_MODE } = __nccwpck_require__(69338); -var GTLT = R++ -src[GTLT] = '((?:<|>)?=?)' +//Const +const VALID_DOCTYPE_NAME = 'html'; +const VALID_SYSTEM_ID = 'about:legacy-compat'; +const QUIRKS_MODE_SYSTEM_ID = 'http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd'; -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. 
-var XRANGEIDENTIFIERLOOSE = R++ -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -var XRANGEIDENTIFIER = R++ -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' +const QUIRKS_MODE_PUBLIC_ID_PREFIXES = [ + '+//silmaril//dtd html pro v0r11 19970101//', + '-//as//dtd html 3.0 aswedit + extensions//', + '-//advasoft ltd//dtd html 3.0 aswedit + extensions//', + '-//ietf//dtd html 2.0 level 1//', + '-//ietf//dtd html 2.0 level 2//', + '-//ietf//dtd html 2.0 strict level 1//', + '-//ietf//dtd html 2.0 strict level 2//', + '-//ietf//dtd html 2.0 strict//', + '-//ietf//dtd html 2.0//', + '-//ietf//dtd html 2.1e//', + '-//ietf//dtd html 3.0//', + '-//ietf//dtd html 3.2 final//', + '-//ietf//dtd html 3.2//', + '-//ietf//dtd html 3//', + '-//ietf//dtd html level 0//', + '-//ietf//dtd html level 1//', + '-//ietf//dtd html level 2//', + '-//ietf//dtd html level 3//', + '-//ietf//dtd html strict level 0//', + '-//ietf//dtd html strict level 1//', + '-//ietf//dtd html strict level 2//', + '-//ietf//dtd html strict level 3//', + '-//ietf//dtd html strict//', + '-//ietf//dtd html//', + '-//metrius//dtd metrius presentational//', + '-//microsoft//dtd internet explorer 2.0 html strict//', + '-//microsoft//dtd internet explorer 2.0 html//', + '-//microsoft//dtd internet explorer 2.0 tables//', + '-//microsoft//dtd internet explorer 3.0 html strict//', + '-//microsoft//dtd internet explorer 3.0 html//', + '-//microsoft//dtd internet explorer 3.0 tables//', + '-//netscape comm. corp.//dtd html//', + '-//netscape comm. corp.//dtd strict html//', + "-//o'reilly and associates//dtd html 2.0//", + "-//o'reilly and associates//dtd html extended 1.0//", + "-//o'reilly and associates//dtd html extended relaxed 1.0//", + '-//sq//dtd html 2.0 hotmetal + extensions//', + '-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//', + '-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//', + '-//spyglass//dtd html 2.0 extended//', + '-//sun microsystems corp.//dtd hotjava html//', + '-//sun microsystems corp.//dtd hotjava strict html//', + '-//w3c//dtd html 3 1995-03-24//', + '-//w3c//dtd html 3.2 draft//', + '-//w3c//dtd html 3.2 final//', + '-//w3c//dtd html 3.2//', + '-//w3c//dtd html 3.2s draft//', + '-//w3c//dtd html 4.0 frameset//', + '-//w3c//dtd html 4.0 transitional//', + '-//w3c//dtd html experimental 19960712//', + '-//w3c//dtd html experimental 970421//', + '-//w3c//dtd w3 html//', + '-//w3o//dtd w3 html 3.0//', + '-//webtechs//dtd mozilla html 2.0//', + '-//webtechs//dtd mozilla html//' +]; -var XRANGEPLAIN = R++ -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + - ')?)?' +const QUIRKS_MODE_NO_SYSTEM_ID_PUBLIC_ID_PREFIXES = QUIRKS_MODE_PUBLIC_ID_PREFIXES.concat([ + '-//w3c//dtd html 4.01 frameset//', + '-//w3c//dtd html 4.01 transitional//' +]); -var XRANGEPLAINLOOSE = R++ -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + - ')?)?' 
+const QUIRKS_MODE_PUBLIC_IDS = ['-//w3o//dtd w3 html strict 3.0//en//', '-/w3c/dtd html 4.0 transitional/en', 'html']; +const LIMITED_QUIRKS_PUBLIC_ID_PREFIXES = ['-//w3c//dtd xhtml 1.0 frameset//', '-//w3c//dtd xhtml 1.0 transitional//']; -var XRANGE = R++ -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' -var XRANGELOOSE = R++ -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' +const LIMITED_QUIRKS_WITH_SYSTEM_ID_PUBLIC_ID_PREFIXES = LIMITED_QUIRKS_PUBLIC_ID_PREFIXES.concat([ + '-//w3c//dtd html 4.01 frameset//', + '-//w3c//dtd html 4.01 transitional//' +]); -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -var COERCE = R++ -src[COERCE] = '(?:^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' +//Utils +function enquoteDoctypeId(id) { + const quote = id.indexOf('"') !== -1 ? "'" : '"'; -// Tilde ranges. -// Meaning is "reasonably at or greater than" -var LONETILDE = R++ -src[LONETILDE] = '(?:~>?)' + return quote + id + quote; +} -var TILDETRIM = R++ -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') -var tildeTrimReplace = '$1~' +function hasPrefix(publicId, prefixes) { + for (let i = 0; i < prefixes.length; i++) { + if (publicId.indexOf(prefixes[i]) === 0) { + return true; + } + } -var TILDE = R++ -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' -var TILDELOOSE = R++ -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' + return false; +} -// Caret ranges. -// Meaning is "at least and backwards compatible with" -var LONECARET = R++ -src[LONECARET] = '(?:\\^)' +//API +exports.isConforming = function(token) { + return ( + token.name === VALID_DOCTYPE_NAME && + token.publicId === null && + (token.systemId === null || token.systemId === VALID_SYSTEM_ID) + ); +}; -var CARETTRIM = R++ -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') -var caretTrimReplace = '$1^' +exports.getDocumentMode = function(token) { + if (token.name !== VALID_DOCTYPE_NAME) { + return DOCUMENT_MODE.QUIRKS; + } -var CARET = R++ -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' -var CARETLOOSE = R++ -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' + const systemId = token.systemId; -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++ -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' -var COMPARATOR = R++ -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' + if (systemId && systemId.toLowerCase() === QUIRKS_MODE_SYSTEM_ID) { + return DOCUMENT_MODE.QUIRKS; + } -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++ -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' + let publicId = token.publicId; -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') -var comparatorTrimReplace = '$1$2$3' + if (publicId !== null) { + publicId = publicId.toLowerCase(); -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. 
-var HYPHENRANGE = R++ -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$' + if (QUIRKS_MODE_PUBLIC_IDS.indexOf(publicId) > -1) { + return DOCUMENT_MODE.QUIRKS; + } -var HYPHENRANGELOOSE = R++ -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$' + let prefixes = systemId === null ? QUIRKS_MODE_NO_SYSTEM_ID_PUBLIC_ID_PREFIXES : QUIRKS_MODE_PUBLIC_ID_PREFIXES; -// Star ranges basically just allow anything at all. -var STAR = R++ -src[STAR] = '(<|>)?=?\\s*\\*' + if (hasPrefix(publicId, prefixes)) { + return DOCUMENT_MODE.QUIRKS; + } -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) - } -} + prefixes = + systemId === null ? LIMITED_QUIRKS_PUBLIC_ID_PREFIXES : LIMITED_QUIRKS_WITH_SYSTEM_ID_PUBLIC_ID_PREFIXES; -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + if (hasPrefix(publicId, prefixes)) { + return DOCUMENT_MODE.LIMITED_QUIRKS; + } } - } - if (version instanceof SemVer) { - return version - } + return DOCUMENT_MODE.NO_QUIRKS; +}; - if (typeof version !== 'string') { - return null - } +exports.serializeContent = function(name, publicId, systemId) { + let str = '!DOCTYPE '; - if (version.length > MAX_LENGTH) { - return null - } + if (name) { + str += name; + } - var r = options.loose ? re[LOOSE] : re[FULL] - if (!r.test(version)) { - return null - } + if (publicId) { + str += ' PUBLIC ' + enquoteDoctypeId(publicId); + } else if (systemId) { + str += ' SYSTEM'; + } - try { - return new SemVer(version, options) - } catch (er) { - return null - } -} + if (systemId !== null) { + str += ' ' + enquoteDoctypeId(systemId); + } -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null -} + return str; +}; -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} -exports.SemVer = SemVer +/***/ }), -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } +/***/ 69338: +/***/ ((__unused_webpack_module, exports) => { - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } +"use strict"; - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose +const NS = (exports.NAMESPACES = { + HTML: 'http://www.w3.org/1999/xhtml', + MATHML: 'http://www.w3.org/1998/Math/MathML', + SVG: 'http://www.w3.org/2000/svg', + XLINK: 'http://www.w3.org/1999/xlink', + XML: 'http://www.w3.org/XML/1998/namespace', + XMLNS: 'http://www.w3.org/2000/xmlns/' +}); - var m = version.trim().match(options.loose ? 
re[LOOSE] : re[FULL]) +exports.ATTRS = { + TYPE: 'type', + ACTION: 'action', + ENCODING: 'encoding', + PROMPT: 'prompt', + NAME: 'name', + COLOR: 'color', + FACE: 'face', + SIZE: 'size' +}; - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } +exports.DOCUMENT_MODE = { + NO_QUIRKS: 'no-quirks', + QUIRKS: 'quirks', + LIMITED_QUIRKS: 'limited-quirks' +}; - this.raw = version +const $ = (exports.TAG_NAMES = { + A: 'a', + ADDRESS: 'address', + ANNOTATION_XML: 'annotation-xml', + APPLET: 'applet', + AREA: 'area', + ARTICLE: 'article', + ASIDE: 'aside', - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] + B: 'b', + BASE: 'base', + BASEFONT: 'basefont', + BGSOUND: 'bgsound', + BIG: 'big', + BLOCKQUOTE: 'blockquote', + BODY: 'body', + BR: 'br', + BUTTON: 'button', - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } + CAPTION: 'caption', + CENTER: 'center', + CODE: 'code', + COL: 'col', + COLGROUP: 'colgroup', - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } + DD: 'dd', + DESC: 'desc', + DETAILS: 'details', + DIALOG: 'dialog', + DIR: 'dir', + DIV: 'div', + DL: 'dl', + DT: 'dt', - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } + EM: 'em', + EMBED: 'embed', - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } + FIELDSET: 'fieldset', + FIGCAPTION: 'figcaption', + FIGURE: 'figure', + FONT: 'font', + FOOTER: 'footer', + FOREIGN_OBJECT: 'foreignObject', + FORM: 'form', + FRAME: 'frame', + FRAMESET: 'frameset', - this.build = m[5] ? m[5].split('.') : [] - this.format() -} + H1: 'h1', + H2: 'h2', + H3: 'h3', + H4: 'h4', + H5: 'h5', + H6: 'h6', + HEAD: 'head', + HEADER: 'header', + HGROUP: 'hgroup', + HR: 'hr', + HTML: 'html', -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' 
+ this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version -} + I: 'i', + IMG: 'img', + IMAGE: 'image', + INPUT: 'input', + IFRAME: 'iframe', -SemVer.prototype.toString = function () { - return this.version -} + KEYGEN: 'keygen', -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } + LABEL: 'label', + LI: 'li', + LINK: 'link', + LISTING: 'listing', - return this.compareMain(other) || this.comparePre(other) -} + MAIN: 'main', + MALIGNMARK: 'malignmark', + MARQUEE: 'marquee', + MATH: 'math', + MENU: 'menu', + META: 'meta', + MGLYPH: 'mglyph', + MI: 'mi', + MO: 'mo', + MN: 'mn', + MS: 'ms', + MTEXT: 'mtext', -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } + NAV: 'nav', + NOBR: 'nobr', + NOFRAMES: 'noframes', + NOEMBED: 'noembed', + NOSCRIPT: 'noscript', - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) -} + OBJECT: 'object', + OL: 'ol', + OPTGROUP: 'optgroup', + OPTION: 'option', -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } + P: 'p', + PARAM: 'param', + PLAINTEXT: 'plaintext', + PRE: 'pre', - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } + RB: 'rb', + RP: 'rp', + RT: 'rt', + RTC: 'rtc', + RUBY: 'ruby', - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) -} + S: 's', + SCRIPT: 'script', + SECTION: 'section', + SELECT: 'select', + SOURCE: 'source', + SMALL: 'small', + SPAN: 'span', + STRIKE: 'strike', + STRONG: 'strong', + STYLE: 'style', + SUB: 'sub', + SUMMARY: 'summary', + SUP: 'sup', -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. 
- case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break + TABLE: 'table', + TBODY: 'tbody', + TEMPLATE: 'template', + TEXTAREA: 'textarea', + TFOOT: 'tfoot', + TD: 'td', + TH: 'th', + THEAD: 'thead', + TITLE: 'title', + TR: 'tr', + TRACK: 'track', + TT: 'tt', - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break + U: 'u', + UL: 'ul', - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this -} + SVG: 'svg', -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined - } + VAR: 'var', - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } -} + WBR: 'wbr', -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } + XMP: 'xmp' +}); + +exports.SPECIAL_ELEMENTS = { + [NS.HTML]: { + [$.ADDRESS]: true, + [$.APPLET]: true, + [$.AREA]: true, + [$.ARTICLE]: true, + [$.ASIDE]: true, + [$.BASE]: true, + [$.BASEFONT]: true, + [$.BGSOUND]: true, + [$.BLOCKQUOTE]: true, + [$.BODY]: true, + [$.BR]: true, + [$.BUTTON]: true, + [$.CAPTION]: true, + [$.CENTER]: true, + [$.COL]: true, + [$.COLGROUP]: true, + [$.DD]: true, + [$.DETAILS]: true, + [$.DIR]: true, + [$.DIV]: true, + [$.DL]: true, + [$.DT]: true, + [$.EMBED]: true, + [$.FIELDSET]: true, + [$.FIGCAPTION]: true, + [$.FIGURE]: true, + [$.FOOTER]: true, + [$.FORM]: true, + [$.FRAME]: true, + 
[$.FRAMESET]: true, + [$.H1]: true, + [$.H2]: true, + [$.H3]: true, + [$.H4]: true, + [$.H5]: true, + [$.H6]: true, + [$.HEAD]: true, + [$.HEADER]: true, + [$.HGROUP]: true, + [$.HR]: true, + [$.HTML]: true, + [$.IFRAME]: true, + [$.IMG]: true, + [$.INPUT]: true, + [$.LI]: true, + [$.LINK]: true, + [$.LISTING]: true, + [$.MAIN]: true, + [$.MARQUEE]: true, + [$.MENU]: true, + [$.META]: true, + [$.NAV]: true, + [$.NOEMBED]: true, + [$.NOFRAMES]: true, + [$.NOSCRIPT]: true, + [$.OBJECT]: true, + [$.OL]: true, + [$.P]: true, + [$.PARAM]: true, + [$.PLAINTEXT]: true, + [$.PRE]: true, + [$.SCRIPT]: true, + [$.SECTION]: true, + [$.SELECT]: true, + [$.SOURCE]: true, + [$.STYLE]: true, + [$.SUMMARY]: true, + [$.TABLE]: true, + [$.TBODY]: true, + [$.TD]: true, + [$.TEMPLATE]: true, + [$.TEXTAREA]: true, + [$.TFOOT]: true, + [$.TH]: true, + [$.THEAD]: true, + [$.TITLE]: true, + [$.TR]: true, + [$.TRACK]: true, + [$.UL]: true, + [$.WBR]: true, + [$.XMP]: true + }, + [NS.MATHML]: { + [$.MI]: true, + [$.MO]: true, + [$.MN]: true, + [$.MS]: true, + [$.MTEXT]: true, + [$.ANNOTATION_XML]: true + }, + [NS.SVG]: { + [$.TITLE]: true, + [$.FOREIGN_OBJECT]: true, + [$.DESC]: true } - return defaultResult // may be undefined - } -} - -exports.compareIdentifiers = compareIdentifiers - -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b - } - - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 -} +}; -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} +/***/ }), -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} +/***/ 38714: +/***/ ((module) => { -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} +"use strict"; -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) +function posix(path) { + return path.charAt(0) === '/'; } -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compare(a, b, loose) - }) + // UNC paths are always absolute + return Boolean(result[2] || isUnc); } -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.rcompare(a, b, loose) - }) -} +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} +/***/ }), -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} +/***/ 85644: +/***/ (function(module) { -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} +// Generated by CoffeeScript 1.12.2 +(function() { + var getNanoSeconds, hrtime, loadTime, moduleLoadTime, nodeLoadTime, upTime; -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} + if ((typeof performance !== "undefined" && performance !== null) && performance.now) { + module.exports = function() { + return performance.now(); + }; + } else if ((typeof process !== "undefined" && process !== null) && process.hrtime) { + module.exports = function() { + return (getNanoSeconds() - nodeLoadTime) / 1e6; + }; + hrtime = process.hrtime; + getNanoSeconds = function() { + var hr; + hr = hrtime(); + return hr[0] * 1e9 + hr[1]; + }; + moduleLoadTime = getNanoSeconds(); + upTime = process.uptime() * 1e9; + nodeLoadTime = moduleLoadTime - upTime; + } else if (Date.now) { + module.exports = function() { + return Date.now() - loadTime; + }; + loadTime = Date.now(); + } else { + module.exports = function() { + return new Date().getTime() - loadTime; + }; + loadTime = new Date().getTime(); + } -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 -} +}).call(this); -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b +//# sourceMappingURL=performance-now.js.map - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b - case '': - case '=': - case '==': - return eq(a, b, loose) +/***/ }), - case '!=': - return neq(a, b, loose) +/***/ 38961: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - case '>': - return gt(a, b, loose) +"use strict"; - case '>=': - return gte(a, b, loose) - case '<': - return lt(a, b, loose) +var url = __nccwpck_require__(78835) +var fs = __nccwpck_require__(35747) - case '<=': - return lte(a, b, loose) +//Parse method copied from https://github.com/brianc/node-postgres +//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com) +//MIT License - default: - throw new TypeError('Invalid operator: ' + op) +//parses a connection string +function parse(str) { + //unix socket + if (str.charAt(0) === '/') { + var config = str.split(' ') + return { host: config[0], database: config[1] } } -} -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false + // url parse expects spaces encoded as %20 + var result = url.parse( + / |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str) ? 
encodeURI(str).replace(/\%25(\d\d)/g, '%$1') : str, + true + ) + var config = result.query + for (var k in config) { + if (Array.isArray(config[k])) { + config[k] = config[k][config[k].length - 1] } } - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } + var auth = (result.auth || ':').split(':') + config.user = auth[0] + config.password = auth.splice(1).join(':') + + config.port = result.port + if (result.protocol == 'socket:') { + config.host = decodeURI(result.pathname) + config.database = result.query.db + config.client_encoding = result.query.encoding + return config + } + if (!config.host) { + // Only set the host if there is no equivalent query param. + config.host = result.hostname } - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) + // If the host is missing it might be a URL-encoded path to a socket. + var pathname = result.pathname + if (!config.host && pathname && /^%2f/i.test(pathname)) { + var pathnameSplit = pathname.split('/') + config.host = decodeURIComponent(pathnameSplit[0]) + pathname = pathnameSplit.splice(1).join('/') + } + // result.pathname is not always guaranteed to have a '/' prefix (e.g. relative urls) + // only strip the slash if it is present. + if (pathname && pathname.charAt(0) === '/') { + pathname = pathname.slice(1) || null } + config.database = pathname && decodeURI(pathname) - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) + if (config.ssl === 'true' || config.ssl === '1') { + config.ssl = true + } - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version + if (config.ssl === '0') { + config.ssl = false } - debug('comp', this) -} + if (config.sslcert || config.sslkey || config.sslrootcert || config.sslmode) { + config.ssl = {} + } -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var m = comp.match(r) + if (config.sslcert) { + config.ssl.cert = fs.readFileSync(config.sslcert).toString() + } - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) + if (config.sslkey) { + config.ssl.key = fs.readFileSync(config.sslkey).toString() } - this.operator = m[1] - if (this.operator === '=') { - this.operator = '' + if (config.sslrootcert) { + config.ssl.ca = fs.readFileSync(config.sslrootcert).toString() } - // if it literally is just '>' or '' then allow anything. 
- if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) + switch (config.sslmode) { + case 'disable': { + config.ssl = false + break + } + case 'prefer': + case 'require': + case 'verify-ca': + case 'verify-full': { + break + } + case 'no-verify': { + config.ssl.rejectUnauthorized = false + break + } } -} -Comparator.prototype.toString = function () { - return this.value + return config } -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) +module.exports = parse - if (this.semver === ANY) { - return true - } +parse.parse = parse - if (typeof version === 'string') { - version = new SemVer(version, this.options) - } - return cmp(version, this.operator, this.semver, this.options) -} +/***/ }), -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } +/***/ 41180: +/***/ ((module) => { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +"use strict"; - var rangeTmp - if (this.operator === '') { - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } +// selected so (BASE - 1) * 0x100000000 + 0xffffffff is a safe integer +var BASE = 1000000; - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) +function readInt8(buffer) { + var high = buffer.readInt32BE(0); + var low = buffer.readUInt32BE(4); + var sign = ''; - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan -} + if (high < 0) { + high = ~high + (low === 0); + low = (~low + 1) >>> 0; + sign = '-'; + } -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } + var result = ''; + var carry; + var t; + var digits; + var pad; + var l; + var i; - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) - } - } + { + carry = high % BASE; + high = high / BASE >>> 0; - if (range instanceof Comparator) { - return new Range(range.value, options) - } + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); - if (!(this instanceof Range)) { - return new Range(range, options) - } + if (low === 0 && high === 0) { 
+ return sign + digits + result; + } - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease + pad = ''; + l = 6 - digits.length; - // First, split based on boolean or || - this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) + for (i = 0; i < l; i++) { + pad += '0'; + } - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) - } + result = pad + digits + result; + } - this.format() -} + { + carry = high % BASE; + high = high / BASE >>> 0; -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); -Range.prototype.toString = function () { - return this.range + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + t = 0x100000000 * carry + low; + digits = '' + t % BASE; + + return sign + digits + result; + } } -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - range = range.trim() - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[COMPARATORTRIM]) +module.exports = readInt8; - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace) - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace) +/***/ }), - // normalize spaces - range = range.split(/\s+/).join(' ') +/***/ 99599: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // At this point, the range is completely trimmed and - // ready to be split into comparators. +"use strict"; - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) +const EventEmitter = __nccwpck_require__(28614).EventEmitter - return set +const NOOP = function () {} + +const removeWhere = (list, predicate) => { + const i = list.findIndex(predicate) + + return i === -1 ? 
undefined : list.splice(i, 1)[0] } -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') +class IdleItem { + constructor(client, idleListener, timeoutId) { + this.client = client + this.idleListener = idleListener + this.timeoutId = timeoutId } - - return this.set.some(function (thisComparators) { - return thisComparators.every(function (thisComparator) { - return range.set.some(function (rangeComparators) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - }) - }) } -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) +class PendingItem { + constructor(callback) { + this.callback = callback + } } -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp +function throwOnDoubleRelease() { + throw new Error('Release called on client which has already been released to the pool.') } -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' +function promisify(Promise, callback) { + if (callback) { + return { callback: callback, result: undefined } + } + let rej + let res + const cb = function (err, client) { + err ? rej(err) : res(client) + } + const result = new Promise(function (resolve, reject) { + res = resolve + rej = reject + }) + return { callback: cb, result: result } } -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') +function makeIdleListener(pool, client) { + return function idleListener(err) { + err.client = client + + client.removeListener('error', idleListener) + client.on('error', () => { + pool.log('additional client error after disconnection due to error', err) + }) + pool._remove(client) + // TODO - document that once the pool emits an error + // the client has already been closed & purged and is unusable + pool.emit('error', err, client) + } } -function replaceTilde (comp, options) { - var r = options.loose ? re[TILDELOOSE] : re[TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret +class Pool extends EventEmitter { + constructor(options, Client) { + super() + this.options = Object.assign({}, options) - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' 
+ (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' + if (options != null && 'password' in options) { + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this.options, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: options.password, + }) + } + if (options != null && options.ssl && options.ssl.key) { + // "hiding" the ssl->key so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this.options.ssl, 'key', { + enumerable: false, + }) } - debug('tilde return', ret) - return ret - }) -} + this.options.max = this.options.max || this.options.poolSize || 10 + this.options.maxUses = this.options.maxUses || Infinity + this.log = this.options.log || function () {} + this.Client = this.options.Client || Client || __nccwpck_require__(44194).Client + this.Promise = this.options.Promise || global.Promise -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') -} + if (typeof this.options.idleTimeoutMillis === 'undefined') { + this.options.idleTimeoutMillis = 10000 + } -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? re[CARETLOOSE] : re[CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret + this._clients = [] + this._idle = [] + this._pendingQueue = [] + this._endCallback = undefined + this.ending = false + this.ended = false + } - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' + _isFull() { + return this._clients.length >= this.options.max + } + + _pulseQueue() { + this.log('pulse queue') + if (this.ended) { + this.log('pulse queue ended') + return + } + if (this.ending) { + this.log('pulse queue on ending') + if (this._idle.length) { + this._idle.slice().map((item) => { + this._remove(item.client) + }) } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' 
+ p + - ' <' + (+M + 1) + '.0.0' + if (!this._clients.length) { + this.ended = true + this._endCallback() } + return + } + // if we don't have any waiting, do nothing + if (!this._pendingQueue.length) { + this.log('no queued requests') + return + } + // if we don't have any idle clients and we have no more room do nothing + if (!this._idle.length && this._isFull()) { + return } + const pendingItem = this._pendingQueue.shift() + if (this._idle.length) { + const idleItem = this._idle.pop() + clearTimeout(idleItem.timeoutId) + const client = idleItem.client + const idleListener = idleItem.idleListener - debug('caret return', ret) - return ret - }) -} + return this._acquireClient(client, pendingItem, idleListener, false) + } + if (!this._isFull()) { + return this.newClient(pendingItem) + } + throw new Error('unexpected condition') + } -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} + _remove(client) { + const removed = removeWhere(this._idle, (item) => item.client === client) -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp + if (removed !== undefined) { + clearTimeout(removed.timeoutId) + } - if (gtlt === '=' && anyX) { - gtlt = '' + this._clients = this._clients.filter((c) => c !== client) + client.end() + this.emit('remove', client) + } + + connect(cb) { + if (this.ending) { + const err = new Error('Cannot use a pool after calling end on the pool') + return cb ? cb(err) : this.Promise.reject(err) } - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0' - } else { - // nothing is forbidden - ret = '*' + const response = promisify(this.Promise, cb) + const result = response.result + + // if we don't have to connect a new client, don't do so + if (this._clients.length >= this.options.max || this._idle.length) { + // if we have idle clients schedule a pulse immediately + if (this._idle.length) { + process.nextTick(() => this._pulseQueue()) } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. 
- // replace X with 0 - if (xm) { - m = 0 + + if (!this.options.connectionTimeoutMillis) { + this._pendingQueue.push(new PendingItem(response.callback)) + return result } - p = 0 - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 + const queueCallback = (err, res, done) => { + clearTimeout(tid) + response.callback(err, res, done) + } + + const pendingItem = new PendingItem(queueCallback) + + // set connection timeout on checking out an existing client + const tid = setTimeout(() => { + // remove the callback from pending waiters because + // we're going to call it with a timeout error + removeWhere(this._pendingQueue, (i) => i.callback === queueCallback) + pendingItem.timedOut = true + response.callback(new Error('timeout exceeded when trying to connect')) + }, this.options.connectionTimeoutMillis) + + this._pendingQueue.push(pendingItem) + return result + } + + this.newClient(new PendingItem(response.callback)) + + return result + } + + newClient(pendingItem) { + const client = new this.Client(this.options) + this._clients.push(client) + const idleListener = makeIdleListener(this, client) + + this.log('checking client timeout') + + // connection timeout logic + let tid + let timeoutHit = false + if (this.options.connectionTimeoutMillis) { + tid = setTimeout(() => { + this.log('ending client due to timeout') + timeoutHit = true + // force kill the node driver, and let libpq do its teardown + client.connection ? client.connection.stream.destroy() : client.end() + }, this.options.connectionTimeoutMillis) + } + + this.log('connecting new client') + client.connect((err) => { + if (tid) { + clearTimeout(tid) + } + client.on('error', idleListener) + if (err) { + this.log('client failed to connect', err) + // remove the dead client from our list of clients + this._clients = this._clients.filter((c) => c !== client) + if (timeoutHit) { + err.message = 'Connection terminated due to connection timeout' } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 + + // this client won’t be released, so move on immediately + this._pulseQueue() + + if (!pendingItem.timedOut) { + pendingItem.callback(err, undefined, NOOP) } + } else { + this.log('new client connected') + + return this._acquireClient(client, pendingItem, idleListener, true) } + }) + } - ret = gtlt + M + '.' + m + '.' + p - } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + // acquire a client for a pending work item + _acquireClient(client, pendingItem, idleListener, isNew) { + if (isNew) { + this.emit('connect', client) } - debug('xRange return', ret) + this.emit('acquire', client) - return ret - }) -} + client.release = this._releaseOnce(client, idleListener) -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! 
- return comp.trim().replace(re[STAR], '') -} + client.removeListener('error', idleListener) -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } + if (!pendingItem.timedOut) { + if (isNew && this.options.verify) { + this.options.verify(client, (err) => { + if (err) { + client.release(err) + return pendingItem.callback(err, undefined, NOOP) + } - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to + pendingItem.callback(undefined, client, client.release) + }) + } else { + pendingItem.callback(undefined, client, client.release) + } + } else { + if (isNew && this.options.verify) { + this.options.verify(client, client.release) + } else { + client.release() + } + } } - return (from + ' ' + to).trim() -} + // returns a function that wraps _release and throws if called more than once + _releaseOnce(client, idleListener) { + let released = false -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false + return (err) => { + if (released) { + throwOnDoubleRelease() + } + + released = true + this._release(client, idleListener, err) + } } - if (typeof version === 'string') { - version = new SemVer(version, this.options) + // release a client back to the poll, include an error + // to remove it from the pool + _release(client, idleListener, err) { + client.on('error', idleListener) + + client._poolUseCount = (client._poolUseCount || 0) + 1 + + // TODO(bmc): expose a proper, public interface _queryable and _ending + if (err || this.ending || !client._queryable || client._ending || client._poolUseCount >= this.options.maxUses) { + if (client._poolUseCount >= this.options.maxUses) { + this.log('remove expended client') + } + this._remove(client) + this._pulseQueue() + return + } + + // idle timeout + let tid + if (this.options.idleTimeoutMillis) { + tid = setTimeout(() => { + this.log('remove idle client') + this._remove(client) + }, this.options.idleTimeoutMillis) + } + + this._idle.push(new IdleItem(client, idleListener, tid)) + this._pulseQueue() } - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true + query(text, values, cb) { + // guard clause against passing a function as the first parameter + if (typeof text === 'function') { + const response = promisify(this.Promise, text) + setImmediate(function () { + return response.callback(new Error('Passing a function as the first parameter to pool.query is not supported')) + }) + return response.result } - } - return false -} -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false + // allow plain text query without values + if (typeof values === 'function') { + cb = values + values = undefined } - } + const response = promisify(this.Promise, cb) + cb = response.callback - if (version.prerelease.length && 
!options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue + this.connect((err, client) => { + if (err) { + return cb(err) } - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true + let clientReleased = false + const onError = (err) => { + if (clientReleased) { + return } + clientReleased = true + client.release(err) + cb(err) } - } - // Version has a -pre, but it's not one of the ones we like. - return false + client.once('error', onError) + this.log('dispatching query') + client.query(text, values, (err, res) => { + this.log('query dispatched') + client.removeListener('error', onError) + if (clientReleased) { + return + } + clientReleased = true + client.release(err) + if (err) { + return cb(err) + } else { + return cb(undefined, res) + } + }) + }) + return response.result } - return true -} - -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false + end(cb) { + this.log('ending') + if (this.ending) { + const err = new Error('Called end on pool more than once') + return cb ? cb(err) : this.Promise.reject(err) + } + this.ending = true + const promised = promisify(this.Promise, cb) + this._endCallback = promised.callback + this._pulseQueue() + return promised.result } - return range.test(version) -} -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null + get waitingCount() { + return this._pendingQueue.length } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } - } - }) - return max -} -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null + get idleCount() { + return this._idle.length } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min -} - -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver + get totalCount() { + return this._clients.length } +} +module.exports = Pool - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] +/***/ }), - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. 
- var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } +/***/ 10320: +/***/ ((__unused_webpack_module, exports) => { - if (minver && range.test(minver)) { - return minver - } +"use strict"; - return null +Object.defineProperty(exports, "__esModule", ({ value: true })); +const emptyBuffer = Buffer.allocUnsafe(0); +class BufferReader { + constructor(offset = 0) { + this.offset = offset; + this.buffer = emptyBuffer; + // TODO(bmc): support non-utf8 encoding? + this.encoding = 'utf-8'; + } + setBuffer(offset, buffer) { + this.offset = offset; + this.buffer = buffer; + } + int16() { + const result = this.buffer.readInt16BE(this.offset); + this.offset += 2; + return result; + } + byte() { + const result = this.buffer[this.offset]; + this.offset++; + return result; + } + int32() { + const result = this.buffer.readInt32BE(this.offset); + this.offset += 4; + return result; + } + string(length) { + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length); + this.offset += length; + return result; + } + cstring() { + const start = this.offset; + let end = start; + while (this.buffer[end++] !== 0) { } + this.offset = end; + return this.buffer.toString(this.encoding, start, end - 1); + } + bytes(length) { + const result = this.buffer.slice(this.offset, this.offset + length); + this.offset += length; + return result; + } } +exports.BufferReader = BufferReader; +//# sourceMappingURL=buffer-reader.js.map -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} +/***/ }), -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} +/***/ 19228: +/***/ ((__unused_webpack_module, exports) => { -// Determine if version is greater than all the versions possible in the range. 
-exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) +"use strict"; + +//binary data writer tuned for encoding binary specific to the postgres binary protocol +Object.defineProperty(exports, "__esModule", ({ value: true })); +class Writer { + constructor(size = 256) { + this.size = size; + this.offset = 5; + this.headerPosition = 0; + this.buffer = Buffer.allocUnsafe(size); + } + ensure(size) { + var remaining = this.buffer.length - this.offset; + if (remaining < size) { + var oldBuffer = this.buffer; + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size; + this.buffer = Buffer.allocUnsafe(newSize); + oldBuffer.copy(this.buffer); + } + } + addInt32(num) { + this.ensure(4); + this.buffer[this.offset++] = (num >>> 24) & 0xff; + this.buffer[this.offset++] = (num >>> 16) & 0xff; + this.buffer[this.offset++] = (num >>> 8) & 0xff; + this.buffer[this.offset++] = (num >>> 0) & 0xff; + return this; + } + addInt16(num) { + this.ensure(2); + this.buffer[this.offset++] = (num >>> 8) & 0xff; + this.buffer[this.offset++] = (num >>> 0) & 0xff; + return this; + } + addCString(string) { + if (!string) { + this.ensure(1); + } + else { + var len = Buffer.byteLength(string); + this.ensure(len + 1); // +1 for null terminator + this.buffer.write(string, this.offset, 'utf-8'); + this.offset += len; + } + this.buffer[this.offset++] = 0; // null terminator + return this; + } + addString(string = '') { + var len = Buffer.byteLength(string); + this.ensure(len); + this.buffer.write(string, this.offset); + this.offset += len; + return this; + } + add(otherBuffer) { + this.ensure(otherBuffer.length); + otherBuffer.copy(this.buffer, this.offset); + this.offset += otherBuffer.length; + return this; + } + join(code) { + if (code) { + this.buffer[this.headerPosition] = code; + //length is everything in this packet minus the code + const length = this.offset - (this.headerPosition + 1); + this.buffer.writeInt32BE(length, this.headerPosition + 1); + } + return this.buffer.slice(code ? 0 : 5, this.offset); + } + flush(code) { + var result = this.join(code); + this.offset = 5; + this.headerPosition = 0; + this.buffer = Buffer.allocUnsafe(this.size); + return result; + } } +exports.Writer = Writer; +//# sourceMappingURL=buffer-writer.js.map -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) +/***/ }), - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } +/***/ 21113: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } +"use strict"; - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +const messages_1 = __nccwpck_require__(36359); +exports.DatabaseError = messages_1.DatabaseError; +const serializer_1 = __nccwpck_require__(42439); +exports.serialize = serializer_1.serialize; +const parser_1 = __nccwpck_require__(67912); +function parse(stream, callback) { + const parser = new parser_1.Parser(); + stream.on('data', (buffer) => parser.parse(buffer, callback)); + return new Promise((resolve) => stream.on('end', () => resolve())); +} +exports.parse = parse; +//# sourceMappingURL=index.js.map - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] +/***/ }), - var high = null - var low = null +/***/ 36359: +/***/ ((__unused_webpack_module, exports) => { - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) +"use strict"; - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseComplete = { + name: "parseComplete" /* parseComplete */, + length: 5, +}; +exports.bindComplete = { + name: "bindComplete" /* bindComplete */, + length: 5, +}; +exports.closeComplete = { + name: "closeComplete" /* closeComplete */, + length: 5, +}; +exports.noData = { + name: "noData" /* noData */, + length: 5, +}; +exports.portalSuspended = { + name: "portalSuspended" /* portalSuspended */, + length: 5, +}; +exports.replicationStart = { + name: "replicationStart" /* replicationStart */, + length: 4, +}; +exports.emptyQuery = { + name: "emptyQuery" /* emptyQuery */, + length: 4, +}; +exports.copyDone = { + name: "copyDone" /* copyDone */, + length: 4, +}; +class DatabaseError extends Error { + constructor(message, length, name) { + super(message); + this.length = length; + this.name = name; } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false +} +exports.DatabaseError = DatabaseError; +class CopyDataMessage { + constructor(length, chunk) { + this.length = length; + this.chunk = chunk; + this.name = "copyData" /* copyData */; } - } - return true } - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +exports.CopyDataMessage = CopyDataMessage; +class CopyResponse { + constructor(length, name, binary, columnCount) { + this.length = length; + this.name = name; + this.binary = binary; + this.columnTypes = new Array(columnCount); + } } - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) +exports.CopyResponse = CopyResponse; +class Field { + constructor(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, format) { + this.name = name; + this.tableID = tableID; + this.columnID = columnID; + this.dataTypeID = dataTypeID; + this.dataTypeSize = dataTypeSize; + this.dataTypeModifier = dataTypeModifier; + this.format = format; + } } +exports.Field = Field; +class RowDescriptionMessage { + constructor(length, fieldCount) { + this.length = length; + this.fieldCount = fieldCount; + this.name = "rowDescription" /* rowDescription */; + this.fields = new Array(this.fieldCount); + } +} +exports.RowDescriptionMessage = RowDescriptionMessage; +class ParameterStatusMessage { + constructor(length, parameterName, parameterValue) { + this.length = length; + this.parameterName = parameterName; + this.parameterValue = parameterValue; + this.name = "parameterStatus" /* parameterStatus */; + } +} +exports.ParameterStatusMessage = ParameterStatusMessage; +class AuthenticationMD5Password { + constructor(length, salt) { + this.length = length; + this.salt = salt; + this.name = "authenticationMD5Password" /* authenticationMD5Password */; + } +} +exports.AuthenticationMD5Password = AuthenticationMD5Password; +class BackendKeyDataMessage { + constructor(length, processID, secretKey) { + this.length = length; + this.processID = processID; + this.secretKey = secretKey; + this.name = "backendKeyData" /* backendKeyData */; + } +} +exports.BackendKeyDataMessage = BackendKeyDataMessage; +class NotificationResponseMessage { + constructor(length, processId, channel, payload) { + this.length = length; + this.processId = processId; + this.channel = channel; + this.payload = payload; + this.name = "notification" /* notification */; + } +} +exports.NotificationResponseMessage = NotificationResponseMessage; +class ReadyForQueryMessage { + constructor(length, status) { + this.length = length; + this.status = status; + this.name = "readyForQuery" /* readyForQuery */; + } +} +exports.ReadyForQueryMessage = ReadyForQueryMessage; +class CommandCompleteMessage { + constructor(length, text) { + this.length = length; + this.text = text; + this.name = "commandComplete" /* commandComplete */; + } +} +exports.CommandCompleteMessage = CommandCompleteMessage; +class DataRowMessage { + constructor(length, fields) { + this.length = length; + this.fields = fields; + this.name = "dataRow" /* dataRow */; + this.fieldCount = fields.length; + } +} +exports.DataRowMessage = DataRowMessage; +class NoticeMessage { + constructor(length, message) { + this.length = length; + this.message = message; + this.name = "notice" /* notice */; + } +} +exports.NoticeMessage = NoticeMessage; +//# sourceMappingURL=messages.js.map -exports.coerce = coerce -function coerce (version) { - if (version instanceof SemVer) { - return version - } - - if (typeof version !== 'string') { - return null - } +/***/ }), - var match = version.match(re[COERCE]) +/***/ 67912: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - if (match == null) { - return null - } +"use strict"; - return parse(match[1] + - '.' 
+ (match[2] || '0') + - '.' + (match[3] || '0')) +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const messages_1 = __nccwpck_require__(36359); +const buffer_reader_1 = __nccwpck_require__(10320); +const assert_1 = __importDefault(__nccwpck_require__(42357)); +// every message is prefixed with a single bye +const CODE_LENGTH = 1; +// every message has an int32 length which includes itself but does +// NOT include the code in the length +const LEN_LENGTH = 4; +const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH; +const emptyBuffer = Buffer.allocUnsafe(0); +class Parser { + constructor(opts) { + var _a, _b; + this.buffer = emptyBuffer; + this.bufferLength = 0; + this.bufferOffset = 0; + this.reader = new buffer_reader_1.BufferReader(); + if (((_a = opts) === null || _a === void 0 ? void 0 : _a.mode) === 'binary') { + throw new Error('Binary mode not supported yet'); + } + this.mode = ((_b = opts) === null || _b === void 0 ? void 0 : _b.mode) || 'text'; + } + parse(buffer, callback) { + this.mergeBuffer(buffer); + const bufferFullLength = this.bufferOffset + this.bufferLength; + let offset = this.bufferOffset; + while (offset + HEADER_LENGTH <= bufferFullLength) { + // code is 1 byte long - it identifies the message type + const code = this.buffer[offset]; + // length is 1 Uint32BE - it is the length of the message EXCLUDING the code + const length = this.buffer.readUInt32BE(offset + CODE_LENGTH); + const fullMessageLength = CODE_LENGTH + length; + if (fullMessageLength + offset <= bufferFullLength) { + const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer); + callback(message); + offset += fullMessageLength; + } + else { + break; + } + } + if (offset === bufferFullLength) { + // No more use for the buffer + this.buffer = emptyBuffer; + this.bufferLength = 0; + this.bufferOffset = 0; + } + else { + // Adjust the cursors of remainingBuffer + this.bufferLength = bufferFullLength - offset; + this.bufferOffset = offset; + } + } + mergeBuffer(buffer) { + if (this.bufferLength > 0) { + const newLength = this.bufferLength + buffer.byteLength; + const newFullLength = newLength + this.bufferOffset; + if (newFullLength > this.buffer.byteLength) { + // We can't concat the new buffer with the remaining one + let newBuffer; + if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) { + // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer + newBuffer = this.buffer; + } + else { + // Allocate a new larger buffer + let newBufferLength = this.buffer.byteLength * 2; + while (newLength >= newBufferLength) { + newBufferLength *= 2; + } + newBuffer = Buffer.allocUnsafe(newBufferLength); + } + // Move the remaining buffer to the new one + this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength); + this.buffer = newBuffer; + this.bufferOffset = 0; + } + // Concat the new buffer with the remaining one + buffer.copy(this.buffer, this.bufferOffset + this.bufferLength); + this.bufferLength = newLength; + } + else { + this.buffer = buffer; + this.bufferOffset = 0; + this.bufferLength = buffer.byteLength; + } + } + handlePacket(offset, code, length, bytes) { + switch (code) { + case 50 /* BindComplete */: + return messages_1.bindComplete; + case 49 /* ParseComplete */: + return messages_1.parseComplete; + case 51 /* CloseComplete */: + 
return messages_1.closeComplete; + case 110 /* NoData */: + return messages_1.noData; + case 115 /* PortalSuspended */: + return messages_1.portalSuspended; + case 99 /* CopyDone */: + return messages_1.copyDone; + case 87 /* ReplicationStart */: + return messages_1.replicationStart; + case 73 /* EmptyQuery */: + return messages_1.emptyQuery; + case 68 /* DataRow */: + return this.parseDataRowMessage(offset, length, bytes); + case 67 /* CommandComplete */: + return this.parseCommandCompleteMessage(offset, length, bytes); + case 90 /* ReadyForQuery */: + return this.parseReadyForQueryMessage(offset, length, bytes); + case 65 /* NotificationResponse */: + return this.parseNotificationMessage(offset, length, bytes); + case 82 /* AuthenticationResponse */: + return this.parseAuthenticationResponse(offset, length, bytes); + case 83 /* ParameterStatus */: + return this.parseParameterStatusMessage(offset, length, bytes); + case 75 /* BackendKeyData */: + return this.parseBackendKeyData(offset, length, bytes); + case 69 /* ErrorMessage */: + return this.parseErrorMessage(offset, length, bytes, "error" /* error */); + case 78 /* NoticeMessage */: + return this.parseErrorMessage(offset, length, bytes, "notice" /* notice */); + case 84 /* RowDescriptionMessage */: + return this.parseRowDescriptionMessage(offset, length, bytes); + case 71 /* CopyIn */: + return this.parseCopyInMessage(offset, length, bytes); + case 72 /* CopyOut */: + return this.parseCopyOutMessage(offset, length, bytes); + case 100 /* CopyData */: + return this.parseCopyData(offset, length, bytes); + default: + assert_1.default.fail(`unknown message code: ${code.toString(16)}`); + } + } + parseReadyForQueryMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const status = this.reader.string(1); + return new messages_1.ReadyForQueryMessage(length, status); + } + parseCommandCompleteMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const text = this.reader.cstring(); + return new messages_1.CommandCompleteMessage(length, text); + } + parseCopyData(offset, length, bytes) { + const chunk = bytes.slice(offset, offset + (length - 4)); + return new messages_1.CopyDataMessage(length, chunk); + } + parseCopyInMessage(offset, length, bytes) { + return this.parseCopyMessage(offset, length, bytes, "copyInResponse" /* copyInResponse */); + } + parseCopyOutMessage(offset, length, bytes) { + return this.parseCopyMessage(offset, length, bytes, "copyOutResponse" /* copyOutResponse */); + } + parseCopyMessage(offset, length, bytes, messageName) { + this.reader.setBuffer(offset, bytes); + const isBinary = this.reader.byte() !== 0; + const columnCount = this.reader.int16(); + const message = new messages_1.CopyResponse(length, messageName, isBinary, columnCount); + for (let i = 0; i < columnCount; i++) { + message.columnTypes[i] = this.reader.int16(); + } + return message; + } + parseNotificationMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const processId = this.reader.int32(); + const channel = this.reader.cstring(); + const payload = this.reader.cstring(); + return new messages_1.NotificationResponseMessage(length, processId, channel, payload); + } + parseRowDescriptionMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const fieldCount = this.reader.int16(); + const message = new messages_1.RowDescriptionMessage(length, fieldCount); + for (let i = 0; i < fieldCount; i++) { + message.fields[i] = this.parseField(); + } + return message; + } + 
parseField() { + const name = this.reader.cstring(); + const tableID = this.reader.int32(); + const columnID = this.reader.int16(); + const dataTypeID = this.reader.int32(); + const dataTypeSize = this.reader.int16(); + const dataTypeModifier = this.reader.int32(); + const mode = this.reader.int16() === 0 ? 'text' : 'binary'; + return new messages_1.Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode); + } + parseDataRowMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const fieldCount = this.reader.int16(); + const fields = new Array(fieldCount); + for (let i = 0; i < fieldCount; i++) { + const len = this.reader.int32(); + // a -1 for length means the value of the field is null + fields[i] = len === -1 ? null : this.reader.string(len); + } + return new messages_1.DataRowMessage(length, fields); + } + parseParameterStatusMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const name = this.reader.cstring(); + const value = this.reader.cstring(); + return new messages_1.ParameterStatusMessage(length, name, value); + } + parseBackendKeyData(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const processID = this.reader.int32(); + const secretKey = this.reader.int32(); + return new messages_1.BackendKeyDataMessage(length, processID, secretKey); + } + parseAuthenticationResponse(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const code = this.reader.int32(); + // TODO(bmc): maybe better types here + const message = { + name: "authenticationOk" /* authenticationOk */, + length, + }; + switch (code) { + case 0: // AuthenticationOk + break; + case 3: // AuthenticationCleartextPassword + if (message.length === 8) { + message.name = "authenticationCleartextPassword" /* authenticationCleartextPassword */; + } + break; + case 5: // AuthenticationMD5Password + if (message.length === 12) { + message.name = "authenticationMD5Password" /* authenticationMD5Password */; + const salt = this.reader.bytes(4); + return new messages_1.AuthenticationMD5Password(length, salt); + } + break; + case 10: // AuthenticationSASL + message.name = "authenticationSASL" /* authenticationSASL */; + message.mechanisms = []; + let mechanism; + do { + mechanism = this.reader.cstring(); + if (mechanism) { + message.mechanisms.push(mechanism); + } + } while (mechanism); + break; + case 11: // AuthenticationSASLContinue + message.name = "authenticationSASLContinue" /* authenticationSASLContinue */; + message.data = this.reader.string(length - 8); + break; + case 12: // AuthenticationSASLFinal + message.name = "authenticationSASLFinal" /* authenticationSASLFinal */; + message.data = this.reader.string(length - 8); + break; + default: + throw new Error('Unknown authenticationOk message type ' + code); + } + return message; + } + parseErrorMessage(offset, length, bytes, name) { + this.reader.setBuffer(offset, bytes); + const fields = {}; + let fieldType = this.reader.string(1); + while (fieldType !== '\0') { + fields[fieldType] = this.reader.cstring(); + fieldType = this.reader.string(1); + } + const messageValue = fields.M; + const message = name === "notice" /* notice */ + ? 
new messages_1.NoticeMessage(length, messageValue) + : new messages_1.DatabaseError(messageValue, length, name); + message.severity = fields.S; + message.code = fields.C; + message.detail = fields.D; + message.hint = fields.H; + message.position = fields.P; + message.internalPosition = fields.p; + message.internalQuery = fields.q; + message.where = fields.W; + message.schema = fields.s; + message.table = fields.t; + message.column = fields.c; + message.dataType = fields.d; + message.constraint = fields.n; + message.file = fields.F; + message.line = fields.L; + message.routine = fields.R; + return message; + } } +exports.Parser = Parser; +//# sourceMappingURL=parser.js.map + +/***/ }), + +/***/ 42439: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const buffer_writer_1 = __nccwpck_require__(19228); +const writer = new buffer_writer_1.Writer(); +const startup = (opts) => { + // protocol version + writer.addInt16(3).addInt16(0); + for (const key of Object.keys(opts)) { + writer.addCString(key).addCString(opts[key]); + } + writer.addCString('client_encoding').addCString('UTF8'); + var bodyBuffer = writer.addCString('').flush(); + // this message is sent without a code + var length = bodyBuffer.length + 4; + return new buffer_writer_1.Writer().addInt32(length).add(bodyBuffer).flush(); +}; +const requestSsl = () => { + const response = Buffer.allocUnsafe(8); + response.writeInt32BE(8, 0); + response.writeInt32BE(80877103, 4); + return response; +}; +const password = (password) => { + return writer.addCString(password).flush(112 /* startup */); +}; +const sendSASLInitialResponseMessage = function (mechanism, initialResponse) { + // 0x70 = 'p' + writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse); + return writer.flush(112 /* startup */); +}; +const sendSCRAMClientFinalMessage = function (additionalData) { + return writer.addString(additionalData).flush(112 /* startup */); +}; +const query = (text) => { + return writer.addCString(text).flush(81 /* query */); +}; +const emptyArray = []; +const parse = (query) => { + // expect something like this: + // { name: 'queryName', + // text: 'select * from blah', + // types: ['int8', 'bool'] } + // normalize missing query names to allow for null + const name = query.name || ''; + if (name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! Postgres only supports 63 characters for query names.'); + console.error('You supplied %s (%s)', name, name.length); + console.error('This can cause conflicts and silent errors executing queries'); + /* eslint-enable no-console */ + } + const types = query.types || emptyArray; + var len = types.length; + var buffer = writer + .addCString(name) // name of query + .addCString(query.text) // actual query text + .addInt16(len); + for (var i = 0; i < len; i++) { + buffer.addInt32(types[i]); + } + return writer.flush(80 /* parse */); +}; +const paramWriter = new buffer_writer_1.Writer(); +const writeValues = function (values, valueMapper) { + for (let i = 0; i < values.length; i++) { + const mappedVal = valueMapper ? 
valueMapper(values[i], i) : values[i]; + if (mappedVal == null) { + // add the param type (string) to the writer + writer.addInt16(0 /* STRING */); + // write -1 to the param writer to indicate null + paramWriter.addInt32(-1); + } + else if (mappedVal instanceof Buffer) { + // add the param type (binary) to the writer + writer.addInt16(1 /* BINARY */); + // add the buffer to the param writer + paramWriter.addInt32(mappedVal.length); + paramWriter.add(mappedVal); + } + else { + // add the param type (string) to the writer + writer.addInt16(0 /* STRING */); + paramWriter.addInt32(Buffer.byteLength(mappedVal)); + paramWriter.addString(mappedVal); + } + } +}; +const bind = (config = {}) => { + // normalize config + const portal = config.portal || ''; + const statement = config.statement || ''; + const binary = config.binary || false; + const values = config.values || emptyArray; + const len = values.length; + writer.addCString(portal).addCString(statement); + writer.addInt16(len); + writeValues(values, config.valueMapper); + writer.addInt16(len); + writer.add(paramWriter.flush()); + // format code + writer.addInt16(binary ? 1 /* BINARY */ : 0 /* STRING */); + return writer.flush(66 /* bind */); +}; +const emptyExecute = Buffer.from([69 /* execute */, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]); +const execute = (config) => { + // this is the happy path for most queries + if (!config || (!config.portal && !config.rows)) { + return emptyExecute; + } + const portal = config.portal || ''; + const rows = config.rows || 0; + const portalLength = Buffer.byteLength(portal); + const len = 4 + portalLength + 1 + 4; + // one extra bit for code + const buff = Buffer.allocUnsafe(1 + len); + buff[0] = 69 /* execute */; + buff.writeInt32BE(len, 1); + buff.write(portal, 5, 'utf-8'); + buff[portalLength + 5] = 0; // null terminate portal cString + buff.writeUInt32BE(rows, buff.length - 4); + return buff; +}; +const cancel = (processID, secretKey) => { + const buffer = Buffer.allocUnsafe(16); + buffer.writeInt32BE(16, 0); + buffer.writeInt16BE(1234, 4); + buffer.writeInt16BE(5678, 6); + buffer.writeInt32BE(processID, 8); + buffer.writeInt32BE(secretKey, 12); + return buffer; +}; +const cstringMessage = (code, string) => { + const stringLen = Buffer.byteLength(string); + const len = 4 + stringLen + 1; + // one extra bit for code + const buffer = Buffer.allocUnsafe(1 + len); + buffer[0] = code; + buffer.writeInt32BE(len, 1); + buffer.write(string, 5, 'utf-8'); + buffer[len] = 0; // null terminate cString + return buffer; +}; +const emptyDescribePortal = writer.addCString('P').flush(68 /* describe */); +const emptyDescribeStatement = writer.addCString('S').flush(68 /* describe */); +const describe = (msg) => { + return msg.name + ? cstringMessage(68 /* describe */, `${msg.type}${msg.name || ''}`) + : msg.type === 'P' + ? 
emptyDescribePortal + : emptyDescribeStatement; +}; +const close = (msg) => { + const text = `${msg.type}${msg.name || ''}`; + return cstringMessage(67 /* close */, text); +}; +const copyData = (chunk) => { + return writer.add(chunk).flush(100 /* copyFromChunk */); +}; +const copyFail = (message) => { + return cstringMessage(102 /* copyFail */, message); +}; +const codeOnlyBuffer = (code) => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]); +const flushBuffer = codeOnlyBuffer(72 /* flush */); +const syncBuffer = codeOnlyBuffer(83 /* sync */); +const endBuffer = codeOnlyBuffer(88 /* end */); +const copyDoneBuffer = codeOnlyBuffer(99 /* copyDone */); +const serialize = { + startup, + password, + requestSsl, + sendSASLInitialResponseMessage, + sendSCRAMClientFinalMessage, + query, + parse, + bind, + execute, + describe, + close, + flush: () => flushBuffer, + sync: () => syncBuffer, + end: () => endBuffer, + copyData, + copyDone: () => copyDoneBuffer, + copyFail, + cancel, +}; +exports.serialize = serialize; +//# sourceMappingURL=serializer.js.map /***/ }), -/***/ 64314: -/***/ ((module, exports, __nccwpck_require__) => { +/***/ 77929: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; +var textParsers = __nccwpck_require__(50977); +var binaryParsers = __nccwpck_require__(99849); +var arrayParser = __nccwpck_require__(83614); +var builtinTypes = __nccwpck_require__(45967); -var Progress = __nccwpck_require__(11083) -var Gauge = __nccwpck_require__(51800) -var EE = __nccwpck_require__(28614).EventEmitter -var log = exports = module.exports = new EE() -var util = __nccwpck_require__(31669) +exports.getTypeParser = getTypeParser; +exports.setTypeParser = setTypeParser; +exports.arrayParser = arrayParser; +exports.builtins = builtinTypes; -var setBlocking = __nccwpck_require__(79344) -var consoleControl = __nccwpck_require__(73645) +var typeParsers = { + text: {}, + binary: {} +}; -setBlocking(true) -var stream = process.stderr -Object.defineProperty(log, 'stream', { - set: function (newStream) { - stream = newStream - if (this.gauge) this.gauge.setWriteTo(stream, stream) - }, - get: function () { - return stream - } -}) +//the empty parse function +function noParse (val) { + return String(val); +}; -// by default, decide based on tty-ness. -var colorEnabled -log.useColor = function () { - return colorEnabled != null ? 
colorEnabled : stream.isTTY -} +//returns a function used to convert a specific type (specified by +//oid) into a result javascript type +//note: the oid can be obtained via the following sql query: +//SELECT oid FROM pg_type WHERE typname = 'TYPE_NAME_HERE'; +function getTypeParser (oid, format) { + format = format || 'text'; + if (!typeParsers[format]) { + return noParse; + } + return typeParsers[format][oid] || noParse; +}; -log.enableColor = function () { - colorEnabled = true - this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) -} -log.disableColor = function () { - colorEnabled = false - this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) -} +function setTypeParser (oid, format, parseFn) { + if(typeof format == 'function') { + parseFn = format; + format = 'text'; + } + typeParsers[format][oid] = parseFn; +}; -// default level -log.level = 'info' +textParsers.init(function(oid, converter) { + typeParsers.text[oid] = converter; +}); -log.gauge = new Gauge(stream, { - enabled: false, // no progress bars unless asked - theme: {hasColor: log.useColor()}, - template: [ - {type: 'progressbar', length: 20}, - {type: 'activityIndicator', kerning: 1, length: 1}, - {type: 'section', default: ''}, - ':', - {type: 'logline', kerning: 1, default: ''} - ] -}) +binaryParsers.init(function(oid, converter) { + typeParsers.binary[oid] = converter; +}); -log.tracker = new Progress.TrackerGroup() -// we track this separately as we may need to temporarily disable the -// display of the status bar for our own loggy purposes. -log.progressEnabled = log.gauge.isEnabled() +/***/ }), -var unicodeEnabled +/***/ 83614: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -log.enableUnicode = function () { - unicodeEnabled = true - this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) -} +var array = __nccwpck_require__(34702); -log.disableUnicode = function () { - unicodeEnabled = false - this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) -} +module.exports = { + create: function (source, transform) { + return { + parse: function() { + return array.parse(source, transform); + } + }; + } +}; -log.setGaugeThemeset = function (themes) { - this.gauge.setThemeset(themes) -} -log.setGaugeTemplate = function (template) { - this.gauge.setTemplate(template) -} +/***/ }), -log.enableProgress = function () { - if (this.progressEnabled) return - this.progressEnabled = true - this.tracker.on('change', this.showProgress) - if (this._pause) return - this.gauge.enable() -} +/***/ 99849: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -log.disableProgress = function () { - if (!this.progressEnabled) return - this.progressEnabled = false - this.tracker.removeListener('change', this.showProgress) - this.gauge.disable() -} +var parseInt64 = __nccwpck_require__(41180); -var trackerConstructors = ['newGroup', 'newItem', 'newStream'] +var parseBits = function(data, bits, offset, invert, callback) { + offset = offset || 0; + invert = invert || false; + callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; }; + var offsetBytes = offset >> 3; -var mixinLog = function (tracker) { - // mixin the public methods from log into the tracker - // (except: conflicts and one's we handle specially) - Object.keys(log).forEach(function (P) { - if (P[0] === '_') return - if (trackerConstructors.filter(function (C) { return C === P }).length) return - if (tracker[P]) 
return - if (typeof log[P] !== 'function') return - var func = log[P] - tracker[P] = function () { - return func.apply(log, arguments) + var inv = function(value) { + if (invert) { + return ~value & 0xff; } - }) - // if the new tracker is a group, make sure any subtrackers get - // mixed in too - if (tracker instanceof Progress.TrackerGroup) { - trackerConstructors.forEach(function (C) { - var func = tracker[C] - tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) } - }) - } - return tracker -} - -// Add tracker constructors to the top level log object -trackerConstructors.forEach(function (C) { - log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) } -}) -log.clearProgress = function (cb) { - if (!this.progressEnabled) return cb && process.nextTick(cb) - this.gauge.hide(cb) -} + return value; + }; -log.showProgress = function (name, completed) { - if (!this.progressEnabled) return - var values = {} - if (name) values.section = name - var last = log.record[log.record.length - 1] - if (last) { - values.subsection = last.prefix - var disp = log.disp[last.level] || last.level - var logline = this._format(disp, log.style[last.level]) - if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle) - logline += ' ' + last.message.split(/\r?\n/)[0] - values.logline = logline + // read first (maybe partial) byte + var mask = 0xff; + var firstBits = 8 - (offset % 8); + if (bits < firstBits) { + mask = (0xff << (8 - bits)) & 0xff; + firstBits = bits; } - values.completed = completed || this.tracker.completed() - this.gauge.show(values) -}.bind(log) // bind for use in tracker's on-change listener - -// temporarily stop emitting, but don't drop -log.pause = function () { - this._paused = true - if (this.progressEnabled) this.gauge.disable() -} - -log.resume = function () { - if (!this._paused) return - this._paused = false - - var b = this._buffer - this._buffer = [] - b.forEach(function (m) { - this.emitLog(m) - }, this) - if (this.progressEnabled) this.gauge.enable() -} - -log._buffer = [] -var id = 0 -log.record = [] -log.maxRecordSize = 10000 -log.log = function (lvl, prefix, message) { - var l = this.levels[lvl] - if (l === undefined) { - return this.emit('error', new Error(util.format( - 'Undefined log level: %j', lvl))) + if (offset) { + mask = mask >> (offset % 8); } - var a = new Array(arguments.length - 2) - var stack = null - for (var i = 2; i < arguments.length; i++) { - var arg = a[i - 2] = arguments[i] - - // resolve stack traces to a plain string. - if (typeof arg === 'object' && arg && - (arg instanceof Error) && arg.stack) { - - Object.defineProperty(arg, 'stack', { - value: stack = arg.stack + '', - enumerable: true, - writable: true - }) - } + var result = 0; + if ((offset % 8) + bits >= 8) { + result = callback(0, inv(data[offsetBytes]) & mask, firstBits); } - if (stack) a.unshift(stack + '\n') - message = util.format.apply(util, a) - - var m = { id: id++, - level: lvl, - prefix: String(prefix || ''), - message: message, - messageRaw: a } - - this.emit('log', m) - this.emit('log.' 
+ lvl, m) - if (m.prefix) this.emit(m.prefix, m) - this.record.push(m) - var mrs = this.maxRecordSize - var n = this.record.length - mrs - if (n > mrs / 10) { - var newSize = Math.floor(mrs * 0.9) - this.record = this.record.slice(-1 * newSize) + // read bytes + var bytes = (bits + offset) >> 3; + for (var i = offsetBytes + 1; i < bytes; i++) { + result = callback(result, inv(data[i]), 8); } - this.emitLog(m) -}.bind(log) - -log.emitLog = function (m) { - if (this._paused) { - this._buffer.push(m) - return + // bits to read, that are not a complete byte + var lastBits = (bits + offset) % 8; + if (lastBits > 0) { + result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits); } - if (this.progressEnabled) this.gauge.pulse(m.prefix) - var l = this.levels[m.level] - if (l === undefined) return - if (l < this.levels[this.level]) return - if (l > 0 && !isFinite(l)) return - // If 'disp' is null or undefined, use the lvl as a default - // Allows: '', 0 as valid disp - var disp = log.disp[m.level] != null ? log.disp[m.level] : m.level - this.clearProgress() - m.message.split(/\r?\n/).forEach(function (line) { - if (this.heading) { - this.write(this.heading, this.headingStyle) - this.write(' ') - } - this.write(disp, log.style[m.level]) - var p = m.prefix || '' - if (p) this.write(' ') - this.write(p, this.prefixStyle) - this.write(' ' + line + '\n') - }, this) - this.showProgress() -} + return result; +}; -log._format = function (msg, style) { - if (!stream) return +var parseFloatFromBits = function(data, precisionBits, exponentBits) { + var bias = Math.pow(2, exponentBits - 1) - 1; + var sign = parseBits(data, 1); + var exponent = parseBits(data, exponentBits, 1); - var output = '' - if (this.useColor()) { - style = style || {} - var settings = [] - if (style.fg) settings.push(style.fg) - if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) - if (style.bold) settings.push('bold') - if (style.underline) settings.push('underline') - if (style.inverse) settings.push('inverse') - if (settings.length) output += consoleControl.color(settings) - if (style.beep) output += consoleControl.beep() - } - output += msg - if (this.useColor()) { - output += consoleControl.color('reset') + if (exponent === 0) { + return 0; } - return output -} - -log.write = function (msg, style) { - if (!stream) return - stream.write(this._format(msg, style)) -} + // parse mantissa + var precisionBitsCounter = 1; + var parsePrecisionBits = function(lastValue, newValue, bits) { + if (lastValue === 0) { + lastValue = 1; + } -log.addLevel = function (lvl, n, style, disp) { - // If 'disp' is null or undefined, use the lvl as a default - if (disp == null) disp = lvl - this.levels[lvl] = n - this.style[lvl] = style - if (!this[lvl]) { - this[lvl] = function () { - var a = new Array(arguments.length + 1) - a[0] = lvl - for (var i = 0; i < arguments.length; i++) { - a[i + 1] = arguments[i] + for (var i = 1; i <= bits; i++) { + precisionBitsCounter /= 2; + if ((newValue & (0x1 << (bits - i))) > 0) { + lastValue += precisionBitsCounter; } - return this.log.apply(this, a) - }.bind(this) - } - this.disp[lvl] = disp -} - -log.prefixStyle = { fg: 'magenta' } -log.headingStyle = { fg: 'white', bg: 'black' } - -log.style = {} -log.levels = {} -log.disp = {} -log.addLevel('silly', -Infinity, { inverse: true }, 'sill') -log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb') -log.addLevel('info', 2000, { fg: 'green' }) -log.addLevel('timing', 2500, { fg: 'green', bg: 'black' }) 
-log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) -log.addLevel('notice', 3500, { fg: 'blue', bg: 'black' }) -log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') -log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') -log.addLevel('silent', Infinity) - -// allow 'error' prefix -log.on('error', function () {}) + } + return lastValue; + }; -/***/ }), + var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits); -/***/ 16325: -/***/ ((module) => { + // special cases + if (exponent == (Math.pow(2, exponentBits + 1) - 1)) { + if (mantissa === 0) { + return (sign === 0) ? Infinity : -Infinity; + } -"use strict"; + return NaN; + } -module.exports = Number.isNaN || function (x) { - return x !== x; + // normale number + return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa; }; +var parseInt16 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 15, 1, true) + 1); + } -/***/ }), - -/***/ 43248: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return parseBits(value, 15, 1); +}; -var crypto = __nccwpck_require__(76417) +var parseInt32 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 31, 1, true) + 1); + } -function sha (key, body, algorithm) { - return crypto.createHmac(algorithm, key).update(body).digest('base64') -} + return parseBits(value, 31, 1); +}; -function rsa (key, body) { - return crypto.createSign('RSA-SHA1').update(body).sign(key, 'base64') -} +var parseFloat32 = function(value) { + return parseFloatFromBits(value, 23, 8); +}; -function rfc3986 (str) { - return encodeURIComponent(str) - .replace(/!/g,'%21') - .replace(/\*/g,'%2A') - .replace(/\(/g,'%28') - .replace(/\)/g,'%29') - .replace(/'/g,'%27') -} +var parseFloat64 = function(value) { + return parseFloatFromBits(value, 52, 11); +}; -// Maps object to bi-dimensional array -// Converts { foo: 'A', bar: [ 'b', 'B' ]} to -// [ ['foo', 'A'], ['bar', 'b'], ['bar', 'B'] ] -function map (obj) { - var key, val, arr = [] - for (key in obj) { - val = obj[key] - if (Array.isArray(val)) - for (var i = 0; i < val.length; i++) - arr.push([key, val[i]]) - else if (typeof val === 'object') - for (var prop in val) - arr.push([key + '[' + prop + ']', val[prop]]) - else - arr.push([key, val]) +var parseNumeric = function(value) { + var sign = parseBits(value, 16, 32); + if (sign == 0xc000) { + return NaN; } - return arr -} - -// Compare function for sort -function compare (a, b) { - return a > b ? 1 : a < b ? -1 : 0 -} - -function generateBase (httpMethod, base_uri, params) { - // adapted from https://dev.twitter.com/docs/auth/oauth and - // https://dev.twitter.com/docs/auth/creating-signature - - // Parameter normalization - // http://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 - var normalized = map(params) - // 1. First, the name and value of each parameter are encoded - .map(function (p) { - return [ rfc3986(p[0]), rfc3986(p[1] || '') ] - }) - // 2. The parameters are sorted by name, using ascending byte value - // ordering. If two or more parameters share the same name, they - // are sorted by their value. - .sort(function (a, b) { - return compare(a[0], b[0]) || compare(a[1], b[1]) - }) - // 3. The name of each parameter is concatenated to its corresponding - // value using an "=" character (ASCII code 61) as a separator, even - // if the value is empty. - .map(function (p) { return p.join('=') }) - // 4. 
The sorted name/value pairs are concatenated together into a - // single string by using an "&" character (ASCII code 38) as - // separator. - .join('&') - var base = [ - rfc3986(httpMethod ? httpMethod.toUpperCase() : 'GET'), - rfc3986(base_uri), - rfc3986(normalized) - ].join('&') + var weight = Math.pow(10000, parseBits(value, 16, 16)); + var result = 0; - return base -} + var digits = []; + var ndigits = parseBits(value, 16); + for (var i = 0; i < ndigits; i++) { + result += parseBits(value, 16, 64 + (16 * i)) * weight; + weight /= 10000; + } -function hmacsign (httpMethod, base_uri, params, consumer_secret, token_secret) { - var base = generateBase(httpMethod, base_uri, params) - var key = [ - consumer_secret || '', - token_secret || '' - ].map(rfc3986).join('&') + var scale = Math.pow(10, parseBits(value, 16, 48)); + return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale; +}; - return sha(key, base, 'sha1') -} +var parseDate = function(isUTC, value) { + var sign = parseBits(value, 1); + var rawValue = parseBits(value, 63, 1); -function hmacsign256 (httpMethod, base_uri, params, consumer_secret, token_secret) { - var base = generateBase(httpMethod, base_uri, params) - var key = [ - consumer_secret || '', - token_secret || '' - ].map(rfc3986).join('&') + // discard usecs and shift from 2000 to 1970 + var result = new Date((((sign === 0) ? 1 : -1) * rawValue / 1000) + 946684800000); - return sha(key, base, 'sha256') -} + if (!isUTC) { + result.setTime(result.getTime() + result.getTimezoneOffset() * 60000); + } -function rsasign (httpMethod, base_uri, params, private_key, token_secret) { - var base = generateBase(httpMethod, base_uri, params) - var key = private_key || '' + // add microseconds to the date + result.usec = rawValue % 1000; + result.getMicroSeconds = function() { + return this.usec; + }; + result.setMicroSeconds = function(value) { + this.usec = value; + }; + result.getUTCMicroSeconds = function() { + return this.usec; + }; - return rsa(key, base) -} + return result; +}; -function plaintext (consumer_secret, token_secret) { - var key = [ - consumer_secret || '', - token_secret || '' - ].map(rfc3986).join('&') +var parseArray = function(value) { + var dim = parseBits(value, 32); - return key -} + var flags = parseBits(value, 32, 32); + var elementType = parseBits(value, 32, 64); -function sign (signMethod, httpMethod, base_uri, params, consumer_secret, token_secret) { - var method - var skipArgs = 1 + var offset = 96; + var dims = []; + for (var i = 0; i < dim; i++) { + // parse dimension + dims[i] = parseBits(value, 32, offset); + offset += 32; - switch (signMethod) { - case 'RSA-SHA1': - method = rsasign - break - case 'HMAC-SHA1': - method = hmacsign - break - case 'HMAC-SHA256': - method = hmacsign256 - break - case 'PLAINTEXT': - method = plaintext - skipArgs = 4 - break - default: - throw new Error('Signature method not supported: ' + signMethod) + // ignore lower bounds + offset += 32; } - return method.apply(null, [].slice.call(arguments, skipArgs)) -} - -exports.hmacsign = hmacsign -exports.hmacsign256 = hmacsign256 -exports.rsasign = rsasign -exports.plaintext = plaintext -exports.sign = sign -exports.rfc3986 = rfc3986 -exports.generateBase = generateBase - -/***/ }), - -/***/ 17426: -/***/ ((module) => { + var parseElement = function(elementType) { + // parse content length + var length = parseBits(value, 32, offset); + offset += 32; -"use strict"; -/* -object-assign -(c) Sindre Sorhus -@license MIT -*/ + // parse null values + if (length == 
0xffffffff) { + return null; + } + var result; + if ((elementType == 0x17) || (elementType == 0x14)) { + // int/bigint + result = parseBits(value, length * 8, offset); + offset += length * 8; + return result; + } + else if (elementType == 0x19) { + // string + result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3); + return result; + } + else { + console.log("ERROR: ElementType not implemented: " + elementType); + } + }; -/* eslint-disable no-unused-vars */ -var getOwnPropertySymbols = Object.getOwnPropertySymbols; -var hasOwnProperty = Object.prototype.hasOwnProperty; -var propIsEnumerable = Object.prototype.propertyIsEnumerable; + var parse = function(dimension, elementType) { + var array = []; + var i; -function toObject(val) { - if (val === null || val === undefined) { - throw new TypeError('Object.assign cannot be called with null or undefined'); - } + if (dimension.length > 1) { + var count = dimension.shift(); + for (i = 0; i < count; i++) { + array[i] = parse(dimension, elementType); + } + dimension.unshift(count); + } + else { + for (i = 0; i < dimension[0]; i++) { + array[i] = parseElement(elementType); + } + } - return Object(val); -} + return array; + }; -function shouldUseNative() { - try { - if (!Object.assign) { - return false; - } + return parse(dims, elementType); +}; - // Detect buggy property enumeration order in older V8 versions. +var parseText = function(value) { + return value.toString('utf8'); +}; - // https://bugs.chromium.org/p/v8/issues/detail?id=4118 - var test1 = new String('abc'); // eslint-disable-line no-new-wrappers - test1[5] = 'de'; - if (Object.getOwnPropertyNames(test1)[0] === '5') { - return false; - } +var parseBool = function(value) { + if(value === null) return null; + return (parseBits(value, 8) > 0); +}; - // https://bugs.chromium.org/p/v8/issues/detail?id=3056 - var test2 = {}; - for (var i = 0; i < 10; i++) { - test2['_' + String.fromCharCode(i)] = i; - } - var order2 = Object.getOwnPropertyNames(test2).map(function (n) { - return test2[n]; - }); - if (order2.join('') !== '0123456789') { - return false; - } +var init = function(register) { + register(20, parseInt64); + register(21, parseInt16); + register(23, parseInt32); + register(26, parseInt32); + register(1700, parseNumeric); + register(700, parseFloat32); + register(701, parseFloat64); + register(16, parseBool); + register(1114, parseDate.bind(null, false)); + register(1184, parseDate.bind(null, true)); + register(1000, parseArray); + register(1007, parseArray); + register(1016, parseArray); + register(1008, parseArray); + register(1009, parseArray); + register(25, parseText); +}; - // https://bugs.chromium.org/p/v8/issues/detail?id=3056 - var test3 = {}; - 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { - test3[letter] = letter; - }); - if (Object.keys(Object.assign({}, test3)).join('') !== - 'abcdefghijklmnopqrst') { - return false; - } +module.exports = { + init: init +}; - return true; - } catch (err) { - // We don't expect any of the above to throw, but better to be safe. - return false; - } -} -module.exports = shouldUseNative() ? 
Object.assign : function (target, source) { - var from; - var to = toObject(target); - var symbols; +/***/ }), - for (var s = 1; s < arguments.length; s++) { - from = Object(arguments[s]); +/***/ 45967: +/***/ ((module) => { - for (var key in from) { - if (hasOwnProperty.call(from, key)) { - to[key] = from[key]; - } - } +/** + * Following query was used to generate this file: - if (getOwnPropertySymbols) { - symbols = getOwnPropertySymbols(from); - for (var i = 0; i < symbols.length; i++) { - if (propIsEnumerable.call(from, symbols[i])) { - to[symbols[i]] = from[symbols[i]]; - } - } - } - } + SELECT json_object_agg(UPPER(PT.typname), PT.oid::int4 ORDER BY pt.oid) + FROM pg_type PT + WHERE typnamespace = (SELECT pgn.oid FROM pg_namespace pgn WHERE nspname = 'pg_catalog') -- Take only builting Postgres types with stable OID (extension types are not guaranted to be stable) + AND typtype = 'b' -- Only basic types + AND typelem = 0 -- Ignore aliases + AND typisdefined -- Ignore undefined types + */ - return to; +module.exports = { + BOOL: 16, + BYTEA: 17, + CHAR: 18, + INT8: 20, + INT2: 21, + INT4: 23, + REGPROC: 24, + TEXT: 25, + OID: 26, + TID: 27, + XID: 28, + CID: 29, + JSON: 114, + XML: 142, + PG_NODE_TREE: 194, + SMGR: 210, + PATH: 602, + POLYGON: 604, + CIDR: 650, + FLOAT4: 700, + FLOAT8: 701, + ABSTIME: 702, + RELTIME: 703, + TINTERVAL: 704, + CIRCLE: 718, + MACADDR8: 774, + MONEY: 790, + MACADDR: 829, + INET: 869, + ACLITEM: 1033, + BPCHAR: 1042, + VARCHAR: 1043, + DATE: 1082, + TIME: 1083, + TIMESTAMP: 1114, + TIMESTAMPTZ: 1184, + INTERVAL: 1186, + TIMETZ: 1266, + BIT: 1560, + VARBIT: 1562, + NUMERIC: 1700, + REFCURSOR: 1790, + REGPROCEDURE: 2202, + REGOPER: 2203, + REGOPERATOR: 2204, + REGCLASS: 2205, + REGTYPE: 2206, + UUID: 2950, + TXID_SNAPSHOT: 2970, + PG_LSN: 3220, + PG_NDISTINCT: 3361, + PG_DEPENDENCIES: 3402, + TSVECTOR: 3614, + TSQUERY: 3615, + GTSVECTOR: 3642, + REGCONFIG: 3734, + REGDICTIONARY: 3769, + JSONB: 3802, + REGNAMESPACE: 4089, + REGROLE: 4096 }; /***/ }), -/***/ 1223: +/***/ 50977: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var wrappy = __nccwpck_require__(62940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) +var array = __nccwpck_require__(34702) +var arrayParser = __nccwpck_require__(83614); +var parseDate = __nccwpck_require__(13264); +var parseInterval = __nccwpck_require__(19991); +var parseByteA = __nccwpck_require__(79377); -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) +function allowNull (fn) { + return function nullAllowed (value) { + if (value === null) return value + return fn(value) } - f.called = false - return f } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f +function parseBool (value) { + if (value === null) return value + return value === 'TRUE' || + value === 't' || + value === 'true' || + value === 'y' || + value === 'yes' || + value 
=== 'on' || + value === '1'; } - -/***/ }), - -/***/ 43406: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -var os = __nccwpck_require__(12087); - -function homedir() { - var env = process.env; - var home = env.HOME; - var user = env.LOGNAME || env.USER || env.LNAME || env.USERNAME; - - if (process.platform === 'win32') { - return env.USERPROFILE || env.HOMEDRIVE + env.HOMEPATH || home || null; - } - - if (process.platform === 'darwin') { - return home || (user ? '/Users/' + user : null); - } - - if (process.platform === 'linux') { - return home || (process.getuid() === 0 ? '/root' : (user ? '/home/' + user : null)); - } - - return home || null; +function parseBoolArray (value) { + if (!value) return null + return array.parse(value, parseBool) } -module.exports = typeof os.homedir === 'function' ? os.homedir : homedir; - - -/***/ }), - -/***/ 71284: -/***/ ((module) => { - -"use strict"; - -var isWindows = process.platform === 'win32'; -var trailingSlashRe = isWindows ? /[^:]\\$/ : /.\/$/; - -// https://github.com/nodejs/node/blob/3e7a14381497a3b73dda68d05b5130563cdab420/lib/os.js#L25-L43 -module.exports = function () { - var path; - - if (isWindows) { - path = process.env.TEMP || - process.env.TMP || - (process.env.SystemRoot || process.env.windir) + '\\temp'; - } else { - path = process.env.TMPDIR || - process.env.TMP || - process.env.TEMP || - '/tmp'; - } - - if (trailingSlashRe.test(path)) { - path = path.slice(0, -1); - } - - return path; -}; - - -/***/ }), +function parseBaseTenInt (string) { + return parseInt(string, 10) +} -/***/ 84669: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +function parseIntegerArray (value) { + if (!value) return null + return array.parse(value, allowNull(parseBaseTenInt)) +} -var isWindows = process.platform === 'win32' -var path = __nccwpck_require__(85622) -var exec = __nccwpck_require__(63129).exec -var osTmpdir = __nccwpck_require__(71284) -var osHomedir = __nccwpck_require__(43406) +function parseBigIntegerArray (value) { + if (!value) return null + return array.parse(value, allowNull(function (entry) { + return parseBigInteger(entry).trim() + })) +} -// looking up envs is a bit costly. -// Also, sometimes we want to have a fallback -// Pass in a callback to wait for the fallback on failures -// After the first lookup, always returns the same thing. -function memo (key, lookup, fallback) { - var fell = false - var falling = false - exports[key] = function (cb) { - var val = lookup() - if (!val && !fell && !falling && fallback) { - fell = true - falling = true - exec(fallback, function (er, output, stderr) { - falling = false - if (er) return // oh well, we tried - val = output.trim() - }) - } - exports[key] = function (cb) { - if (cb) process.nextTick(cb.bind(null, null, val)) - return val +var parsePointArray = function(value) { + if(!value) { return null; } + var p = arrayParser.create(value, function(entry) { + if(entry !== null) { + entry = parsePoint(entry); } - if (cb && !falling) process.nextTick(cb.bind(null, null, val)) - return val - } -} + return entry; + }); -memo('user', function () { - return ( isWindows - ? process.env.USERDOMAIN + '\\' + process.env.USERNAME - : process.env.USER - ) -}, 'whoami') + return p.parse(); +}; -memo('prompt', function () { - return isWindows ? 
process.env.PROMPT : process.env.PS1 -}) +var parseFloatArray = function(value) { + if(!value) { return null; } + var p = arrayParser.create(value, function(entry) { + if(entry !== null) { + entry = parseFloat(entry); + } + return entry; + }); -memo('hostname', function () { - return isWindows ? process.env.COMPUTERNAME : process.env.HOSTNAME -}, 'hostname') + return p.parse(); +}; -memo('tmpdir', function () { - return osTmpdir() -}) +var parseStringArray = function(value) { + if(!value) { return null; } -memo('home', function () { - return osHomedir() -}) + var p = arrayParser.create(value); + return p.parse(); +}; -memo('path', function () { - return (process.env.PATH || - process.env.Path || - process.env.path).split(isWindows ? ';' : ':') -}) +var parseDateArray = function(value) { + if (!value) { return null; } -memo('editor', function () { - return process.env.EDITOR || - process.env.VISUAL || - (isWindows ? 'notepad.exe' : 'vi') -}) + var p = arrayParser.create(value, function(entry) { + if (entry !== null) { + entry = parseDate(entry); + } + return entry; + }); -memo('shell', function () { - return isWindows ? process.env.ComSpec || 'cmd' - : process.env.SHELL || 'bash' -}) + return p.parse(); +}; +var parseIntervalArray = function(value) { + if (!value) { return null; } -/***/ }), + var p = arrayParser.create(value, function(entry) { + if (entry !== null) { + entry = parseInterval(entry); + } + return entry; + }); -/***/ 9759: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return p.parse(); +}; -"use strict"; +var parseByteAArray = function(value) { + if (!value) { return null; } + return array.parse(value, allowNull(parseByteA)); +}; -const doctype = __nccwpck_require__(27079); -const { DOCUMENT_MODE } = __nccwpck_require__(69338); +var parseInteger = function(value) { + return parseInt(value, 10); +}; -//Conversion tables for DOM Level1 structure emulation -const nodeTypes = { - element: 1, - text: 3, - cdata: 4, - comment: 8 +var parseBigInteger = function(value) { + var valStr = String(value); + if (/^\d+$/.test(valStr)) { return valStr; } + return value; }; -const nodePropertyShorthands = { - tagName: 'name', - childNodes: 'children', - parentNode: 'parent', - previousSibling: 'prev', - nextSibling: 'next', - nodeValue: 'data' +var parseJsonArray = function(value) { + if (!value) { return null; } + + return array.parse(value, allowNull(JSON.parse)); }; -//Node -class Node { - constructor(props) { - for (const key of Object.keys(props)) { - this[key] = props[key]; - } - } +var parsePoint = function(value) { + if (value[0] !== '(') { return null; } - get firstChild() { - const children = this.children; + value = value.substring( 1, value.length - 1 ).split(','); - return (children && children[0]) || null; - } + return { + x: parseFloat(value[0]) + , y: parseFloat(value[1]) + }; +}; - get lastChild() { - const children = this.children; +var parseCircle = function(value) { + if (value[0] !== '<' && value[1] !== '(') { return null; } - return (children && children[children.length - 1]) || null; + var point = '('; + var radius = ''; + var pointParsed = false; + for (var i = 2; i < value.length - 1; i++){ + if (!pointParsed) { + point += value[i]; } - get nodeType() { - return nodeTypes[this.type] || nodeTypes.element; + if (value[i] === ')') { + pointParsed = true; + continue; + } else if (!pointParsed) { + continue; } -} -Object.keys(nodePropertyShorthands).forEach(key => { - const shorthand = nodePropertyShorthands[key]; + if (value[i] === ','){ + 
continue; + } - Object.defineProperty(Node.prototype, key, { - get: function() { - return this[shorthand] || null; - }, - set: function(val) { - this[shorthand] = val; - return val; - } - }); -}); + radius += value[i]; + } + var result = parsePoint(point); + result.radius = parseFloat(radius); -//Node construction -exports.createDocument = function() { - return new Node({ - type: 'root', - name: 'root', - parent: null, - prev: null, - next: null, - children: [], - 'x-mode': DOCUMENT_MODE.NO_QUIRKS - }); + return result; }; -exports.createDocumentFragment = function() { - return new Node({ - type: 'root', - name: 'root', - parent: null, - prev: null, - next: null, - children: [] - }); +var init = function(register) { + register(20, parseBigInteger); // int8 + register(21, parseInteger); // int2 + register(23, parseInteger); // int4 + register(26, parseInteger); // oid + register(700, parseFloat); // float4/real + register(701, parseFloat); // float8/double + register(16, parseBool); + register(1082, parseDate); // date + register(1114, parseDate); // timestamp without timezone + register(1184, parseDate); // timestamp + register(600, parsePoint); // point + register(651, parseStringArray); // cidr[] + register(718, parseCircle); // circle + register(1000, parseBoolArray); + register(1001, parseByteAArray); + register(1005, parseIntegerArray); // _int2 + register(1007, parseIntegerArray); // _int4 + register(1028, parseIntegerArray); // oid[] + register(1016, parseBigIntegerArray); // _int8 + register(1017, parsePointArray); // point[] + register(1021, parseFloatArray); // _float4 + register(1022, parseFloatArray); // _float8 + register(1231, parseFloatArray); // _numeric + register(1014, parseStringArray); //char + register(1015, parseStringArray); //varchar + register(1008, parseStringArray); + register(1009, parseStringArray); + register(1040, parseStringArray); // macaddr[] + register(1041, parseStringArray); // inet[] + register(1115, parseDateArray); // timestamp without time zone[] + register(1182, parseDateArray); // _date + register(1185, parseDateArray); // timestamp with time zone[] + register(1186, parseInterval); + register(1187, parseIntervalArray); + register(17, parseByteA); + register(114, JSON.parse.bind(JSON)); // json + register(3802, JSON.parse.bind(JSON)); // jsonb + register(199, parseJsonArray); // json[] + register(3807, parseJsonArray); // jsonb[] + register(3907, parseStringArray); // numrange[] + register(2951, parseStringArray); // uuid[] + register(791, parseStringArray); // money[] + register(1183, parseStringArray); // time[] + register(1270, parseStringArray); // timetz[] }; -exports.createElement = function(tagName, namespaceURI, attrs) { - const attribs = Object.create(null); - const attribsNamespace = Object.create(null); - const attribsPrefix = Object.create(null); +module.exports = { + init: init +}; - for (let i = 0; i < attrs.length; i++) { - const attrName = attrs[i].name; - attribs[attrName] = attrs[i].value; - attribsNamespace[attrName] = attrs[i].namespace; - attribsPrefix[attrName] = attrs[i].prefix; - } +/***/ }), - return new Node({ - type: tagName === 'script' || tagName === 'style' ? 
tagName : 'tag', - name: tagName, - namespace: namespaceURI, - attribs: attribs, - 'x-attribsNamespace': attribsNamespace, - 'x-attribsPrefix': attribsPrefix, - children: [], - parent: null, - prev: null, - next: null - }); -}; +/***/ 37143: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -exports.createCommentNode = function(data) { - return new Node({ - type: 'comment', - data: data, - parent: null, - prev: null, - next: null - }); -}; +"use strict"; -const createTextNode = function(value) { - return new Node({ - type: 'text', - data: value, - parent: null, - prev: null, - next: null - }); -}; -//Tree mutation -const appendChild = (exports.appendChild = function(parentNode, newNode) { - const prev = parentNode.children[parentNode.children.length - 1]; +var EventEmitter = __nccwpck_require__(28614).EventEmitter +var util = __nccwpck_require__(31669) +var utils = __nccwpck_require__(36419) +var sasl = __nccwpck_require__(19481) +var pgPass = __nccwpck_require__(19713) +var TypeOverrides = __nccwpck_require__(78873) - if (prev) { - prev.next = newNode; - newNode.prev = prev; - } +var ConnectionParameters = __nccwpck_require__(19112) +var Query = __nccwpck_require__(31283) +var defaults = __nccwpck_require__(26419) +var Connection = __nccwpck_require__(62848) - parentNode.children.push(newNode); - newNode.parent = parentNode; -}); +class Client extends EventEmitter { + constructor(config) { + super() -const insertBefore = (exports.insertBefore = function(parentNode, newNode, referenceNode) { - const insertionIdx = parentNode.children.indexOf(referenceNode); - const prev = referenceNode.prev; + this.connectionParameters = new ConnectionParameters(config) + this.user = this.connectionParameters.user + this.database = this.connectionParameters.database + this.port = this.connectionParameters.port + this.host = this.connectionParameters.host - if (prev) { - prev.next = newNode; - newNode.prev = prev; + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: this.connectionParameters.password, + }) + + this.replication = this.connectionParameters.replication + + var c = config || {} + + this._Promise = c.Promise || global.Promise + this._types = new TypeOverrides(c.types) + this._ending = false + this._connecting = false + this._connected = false + this._connectionError = false + this._queryable = true + + this.connection = + c.connection || + new Connection({ + stream: c.stream, + ssl: this.connectionParameters.ssl, + keepAlive: c.keepAlive || false, + keepAliveInitialDelayMillis: c.keepAliveInitialDelayMillis || 0, + encoding: this.connectionParameters.client_encoding || 'utf8', + }) + this.queryQueue = [] + this.binary = c.binary || defaults.binary + this.processID = null + this.secretKey = null + this.ssl = this.connectionParameters.ssl || false + // As with Password, make SSL->Key (the private key) non-enumerable. 
+ // It won't show up in stack traces + // or if the client is console.logged + if (this.ssl && this.ssl.key) { + Object.defineProperty(this.ssl, 'key', { + enumerable: false, + }) } - referenceNode.prev = newNode; - newNode.next = referenceNode; + this._connectionTimeoutMillis = c.connectionTimeoutMillis || 0 + } - parentNode.children.splice(insertionIdx, 0, newNode); - newNode.parent = parentNode; -}); + _errorAllQueries(err) { + const enqueueError = (query) => { + process.nextTick(() => { + query.handleError(err, this.connection) + }) + } -exports.setTemplateContent = function(templateElement, contentElement) { - appendChild(templateElement, contentElement); -}; + if (this.activeQuery) { + enqueueError(this.activeQuery) + this.activeQuery = null + } -exports.getTemplateContent = function(templateElement) { - return templateElement.children[0]; -}; + this.queryQueue.forEach(enqueueError) + this.queryQueue.length = 0 + } -exports.setDocumentType = function(document, name, publicId, systemId) { - const data = doctype.serializeContent(name, publicId, systemId); - let doctypeNode = null; + _connect(callback) { + var self = this + var con = this.connection + this._connectionCallback = callback - for (let i = 0; i < document.children.length; i++) { - if (document.children[i].type === 'directive' && document.children[i].name === '!doctype') { - doctypeNode = document.children[i]; - break; - } + if (this._connecting || this._connected) { + const err = new Error('Client has already been connected. You cannot reuse a client.') + process.nextTick(() => { + callback(err) + }) + return } + this._connecting = true - if (doctypeNode) { - doctypeNode.data = data; - doctypeNode['x-name'] = name; - doctypeNode['x-publicId'] = publicId; - doctypeNode['x-systemId'] = systemId; + this.connectionTimeoutHandle + if (this._connectionTimeoutMillis > 0) { + this.connectionTimeoutHandle = setTimeout(() => { + con._ending = true + con.stream.destroy(new Error('timeout expired')) + }, this._connectionTimeoutMillis) + } + + if (this.host && this.host.indexOf('/') === 0) { + con.connect(this.host + '/.s.PGSQL.' + this.port) } else { - appendChild( - document, - new Node({ - type: 'directive', - name: '!doctype', - data: data, - 'x-name': name, - 'x-publicId': publicId, - 'x-systemId': systemId - }) - ); + con.connect(this.port, this.host) } -}; -exports.setDocumentMode = function(document, mode) { - document['x-mode'] = mode; -}; + // once connection is established send startup message + con.on('connect', function () { + if (self.ssl) { + con.requestSsl() + } else { + con.startup(self.getStartupConf()) + } + }) -exports.getDocumentMode = function(document) { - return document['x-mode']; -}; + con.on('sslconnect', function () { + con.startup(self.getStartupConf()) + }) -exports.detachNode = function(node) { - if (node.parent) { - const idx = node.parent.children.indexOf(node); - const prev = node.prev; - const next = node.next; + this._attachListeners(con) - node.prev = null; - node.next = null; + con.once('end', () => { + const error = this._ending ? new Error('Connection terminated') : new Error('Connection terminated unexpectedly') - if (prev) { - prev.next = next; - } + clearTimeout(this.connectionTimeoutHandle) + this._errorAllQueries(error) - if (next) { - next.prev = prev; + if (!this._ending) { + // if the connection is ended without us calling .end() + // on this client then we have an unexpected disconnection + // treat this as an error unless we've already emitted an error + // during connection. 
+ if (this._connecting && !this._connectionError) { + if (this._connectionCallback) { + this._connectionCallback(error) + } else { + this._handleErrorEvent(error) + } + } else if (!this._connectionError) { + this._handleErrorEvent(error) } + } - node.parent.children.splice(idx, 1); - node.parent = null; - } -}; - -exports.insertText = function(parentNode, text) { - const lastChild = parentNode.children[parentNode.children.length - 1]; + process.nextTick(() => { + this.emit('end') + }) + }) + } - if (lastChild && lastChild.type === 'text') { - lastChild.data += text; - } else { - appendChild(parentNode, createTextNode(text)); + connect(callback) { + if (callback) { + this._connect(callback) + return } -}; -exports.insertTextBefore = function(parentNode, text, referenceNode) { - const prevNode = parentNode.children[parentNode.children.indexOf(referenceNode) - 1]; + return new this._Promise((resolve, reject) => { + this._connect((error) => { + if (error) { + reject(error) + } else { + resolve() + } + }) + }) + } - if (prevNode && prevNode.type === 'text') { - prevNode.data += text; + _attachListeners(con) { + // password request handling + con.on('authenticationCleartextPassword', this._handleAuthCleartextPassword.bind(this)) + // password request handling + con.on('authenticationMD5Password', this._handleAuthMD5Password.bind(this)) + // password request handling (SASL) + con.on('authenticationSASL', this._handleAuthSASL.bind(this)) + con.on('authenticationSASLContinue', this._handleAuthSASLContinue.bind(this)) + con.on('authenticationSASLFinal', this._handleAuthSASLFinal.bind(this)) + con.on('backendKeyData', this._handleBackendKeyData.bind(this)) + con.on('error', this._handleErrorEvent.bind(this)) + con.on('errorMessage', this._handleErrorMessage.bind(this)) + con.on('readyForQuery', this._handleReadyForQuery.bind(this)) + con.on('notice', this._handleNotice.bind(this)) + con.on('rowDescription', this._handleRowDescription.bind(this)) + con.on('dataRow', this._handleDataRow.bind(this)) + con.on('portalSuspended', this._handlePortalSuspended.bind(this)) + con.on('emptyQuery', this._handleEmptyQuery.bind(this)) + con.on('commandComplete', this._handleCommandComplete.bind(this)) + con.on('parseComplete', this._handleParseComplete.bind(this)) + con.on('copyInResponse', this._handleCopyInResponse.bind(this)) + con.on('copyData', this._handleCopyData.bind(this)) + con.on('notification', this._handleNotification.bind(this)) + } + + // TODO(bmc): deprecate pgpass "built in" integration since this.password can be a function + // it can be supplied by the user if required - this is a breaking change! 
+ _checkPgPass(cb) { + const con = this.connection + if (typeof this.password === 'function') { + this._Promise + .resolve() + .then(() => this.password()) + .then((pass) => { + if (pass !== undefined) { + if (typeof pass !== 'string') { + con.emit('error', new TypeError('Password must be a string')) + return + } + this.connectionParameters.password = this.password = pass + } else { + this.connectionParameters.password = this.password = null + } + cb() + }) + .catch((err) => { + con.emit('error', err) + }) + } else if (this.password !== null) { + cb() } else { - insertBefore(parentNode, createTextNode(text), referenceNode); + pgPass(this.connectionParameters, (pass) => { + if (undefined !== pass) { + this.connectionParameters.password = this.password = pass + } + cb() + }) } -}; + } -exports.adoptAttributes = function(recipient, attrs) { - for (let i = 0; i < attrs.length; i++) { - const attrName = attrs[i].name; + _handleAuthCleartextPassword(msg) { + this._checkPgPass(() => { + this.connection.password(this.password) + }) + } - if (typeof recipient.attribs[attrName] === 'undefined') { - recipient.attribs[attrName] = attrs[i].value; - recipient['x-attribsNamespace'][attrName] = attrs[i].namespace; - recipient['x-attribsPrefix'][attrName] = attrs[i].prefix; - } - } -}; + _handleAuthMD5Password(msg) { + this._checkPgPass(() => { + const hashedPassword = utils.postgresMd5PasswordHash(this.user, this.password, msg.salt) + this.connection.password(hashedPassword) + }) + } -//Tree traversing -exports.getFirstChild = function(node) { - return node.children[0]; -}; + _handleAuthSASL(msg) { + this._checkPgPass(() => { + this.saslSession = sasl.startSession(msg.mechanisms) + this.connection.sendSASLInitialResponseMessage(this.saslSession.mechanism, this.saslSession.response) + }) + } -exports.getChildNodes = function(node) { - return node.children; -}; + _handleAuthSASLContinue(msg) { + sasl.continueSession(this.saslSession, this.password, msg.data) + this.connection.sendSCRAMClientFinalMessage(this.saslSession.response) + } -exports.getParentNode = function(node) { - return node.parent; -}; + _handleAuthSASLFinal(msg) { + sasl.finalizeSession(this.saslSession, msg.data) + this.saslSession = null + } -exports.getAttrList = function(element) { - const attrList = []; + _handleBackendKeyData(msg) { + this.processID = msg.processID + this.secretKey = msg.secretKey + } - for (const name in element.attribs) { - attrList.push({ - name: name, - value: element.attribs[name], - namespace: element['x-attribsNamespace'][name], - prefix: element['x-attribsPrefix'][name] - }); + _handleReadyForQuery(msg) { + if (this._connecting) { + this._connecting = false + this._connected = true + clearTimeout(this.connectionTimeoutHandle) + + // process possible callback argument to Client#connect + if (this._connectionCallback) { + this._connectionCallback(null, this) + // remove callback for proper error handling + // after the connect event + this._connectionCallback = null + } + this.emit('connect') + } + const { activeQuery } = this + this.activeQuery = null + this.readyForQuery = true + if (activeQuery) { + activeQuery.handleReadyForQuery(this.connection) } + this._pulseQueryQueue() + } - return attrList; -}; + // if we receieve an error event or error message + // during the connection process we handle it here + _handleErrorWhileConnecting(err) { + if (this._connectionError) { + // TODO(bmc): this is swallowing errors - we shouldn't do this + return + } + this._connectionError = true + 
clearTimeout(this.connectionTimeoutHandle) + if (this._connectionCallback) { + return this._connectionCallback(err) + } + this.emit('error', err) + } -//Node data -exports.getTagName = function(element) { - return element.name; -}; + // if we're connected and we receive an error event from the connection + // this means the socket is dead - do a hard abort of all queries and emit + // the socket error on the client as well + _handleErrorEvent(err) { + if (this._connecting) { + return this._handleErrorWhileConnecting(err) + } + this._queryable = false + this._errorAllQueries(err) + this.emit('error', err) + } -exports.getNamespaceURI = function(element) { - return element.namespace; -}; + // handle error messages from the postgres backend + _handleErrorMessage(msg) { + if (this._connecting) { + return this._handleErrorWhileConnecting(msg) + } + const activeQuery = this.activeQuery -exports.getTextNodeContent = function(textNode) { - return textNode.data; -}; + if (!activeQuery) { + this._handleErrorEvent(msg) + return + } -exports.getCommentNodeContent = function(commentNode) { - return commentNode.data; -}; + this.activeQuery = null + activeQuery.handleError(msg, this.connection) + } -exports.getDocumentTypeNodeName = function(doctypeNode) { - return doctypeNode['x-name']; -}; + _handleRowDescription(msg) { + // delegate rowDescription to active query + this.activeQuery.handleRowDescription(msg) + } -exports.getDocumentTypeNodePublicId = function(doctypeNode) { - return doctypeNode['x-publicId']; -}; + _handleDataRow(msg) { + // delegate dataRow to active query + this.activeQuery.handleDataRow(msg) + } -exports.getDocumentTypeNodeSystemId = function(doctypeNode) { - return doctypeNode['x-systemId']; -}; + _handlePortalSuspended(msg) { + // delegate portalSuspended to active query + this.activeQuery.handlePortalSuspended(this.connection) + } -//Node types -exports.isTextNode = function(node) { - return node.type === 'text'; -}; + _handleEmptyQuery(msg) { + // delegate emptyQuery to active query + this.activeQuery.handleEmptyQuery(this.connection) + } -exports.isCommentNode = function(node) { - return node.type === 'comment'; -}; + _handleCommandComplete(msg) { + // delegate commandComplete to active query + this.activeQuery.handleCommandComplete(msg, this.connection) + } -exports.isDocumentTypeNode = function(node) { - return node.type === 'directive' && node.name === '!doctype'; -}; + _handleParseComplete(msg) { + // if a prepared statement has a name and properly parses + // we track that its already been executed so we don't parse + // it again on the same client + if (this.activeQuery.name) { + this.connection.parsedStatements[this.activeQuery.name] = this.activeQuery.text + } + } -exports.isElementNode = function(node) { - return !!node.attribs; -}; + _handleCopyInResponse(msg) { + this.activeQuery.handleCopyInResponse(this.connection) + } -// Source code location -exports.setNodeSourceCodeLocation = function(node, location) { - node.sourceCodeLocation = location; -}; + _handleCopyData(msg) { + this.activeQuery.handleCopyData(msg, this.connection) + } -exports.getNodeSourceCodeLocation = function(node) { - return node.sourceCodeLocation; -}; + _handleNotification(msg) { + this.emit('notification', msg) + } -exports.updateNodeSourceCodeLocation = function(node, endLocation) { - node.sourceCodeLocation = Object.assign(node.sourceCodeLocation, endLocation); -}; + _handleNotice(msg) { + this.emit('notice', msg) + } + getStartupConf() { + var params = this.connectionParameters -/***/ }), + 
var data = { + user: params.user, + database: params.database, + } -/***/ 27079: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + var appName = params.application_name || params.fallback_application_name + if (appName) { + data.application_name = appName + } + if (params.replication) { + data.replication = '' + params.replication + } + if (params.statement_timeout) { + data.statement_timeout = String(parseInt(params.statement_timeout, 10)) + } + if (params.idle_in_transaction_session_timeout) { + data.idle_in_transaction_session_timeout = String(parseInt(params.idle_in_transaction_session_timeout, 10)) + } + if (params.options) { + data.options = params.options + } -"use strict"; + return data + } + cancel(client, query) { + if (client.activeQuery === query) { + var con = this.connection -const { DOCUMENT_MODE } = __nccwpck_require__(69338); + if (this.host && this.host.indexOf('/') === 0) { + con.connect(this.host + '/.s.PGSQL.' + this.port) + } else { + con.connect(this.port, this.host) + } -//Const -const VALID_DOCTYPE_NAME = 'html'; -const VALID_SYSTEM_ID = 'about:legacy-compat'; -const QUIRKS_MODE_SYSTEM_ID = 'http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd'; + // once connection is established send cancel message + con.on('connect', function () { + con.cancel(client.processID, client.secretKey) + }) + } else if (client.queryQueue.indexOf(query) !== -1) { + client.queryQueue.splice(client.queryQueue.indexOf(query), 1) + } + } -const QUIRKS_MODE_PUBLIC_ID_PREFIXES = [ - '+//silmaril//dtd html pro v0r11 19970101//', - '-//as//dtd html 3.0 aswedit + extensions//', - '-//advasoft ltd//dtd html 3.0 aswedit + extensions//', - '-//ietf//dtd html 2.0 level 1//', - '-//ietf//dtd html 2.0 level 2//', - '-//ietf//dtd html 2.0 strict level 1//', - '-//ietf//dtd html 2.0 strict level 2//', - '-//ietf//dtd html 2.0 strict//', - '-//ietf//dtd html 2.0//', - '-//ietf//dtd html 2.1e//', - '-//ietf//dtd html 3.0//', - '-//ietf//dtd html 3.2 final//', - '-//ietf//dtd html 3.2//', - '-//ietf//dtd html 3//', - '-//ietf//dtd html level 0//', - '-//ietf//dtd html level 1//', - '-//ietf//dtd html level 2//', - '-//ietf//dtd html level 3//', - '-//ietf//dtd html strict level 0//', - '-//ietf//dtd html strict level 1//', - '-//ietf//dtd html strict level 2//', - '-//ietf//dtd html strict level 3//', - '-//ietf//dtd html strict//', - '-//ietf//dtd html//', - '-//metrius//dtd metrius presentational//', - '-//microsoft//dtd internet explorer 2.0 html strict//', - '-//microsoft//dtd internet explorer 2.0 html//', - '-//microsoft//dtd internet explorer 2.0 tables//', - '-//microsoft//dtd internet explorer 3.0 html strict//', - '-//microsoft//dtd internet explorer 3.0 html//', - '-//microsoft//dtd internet explorer 3.0 tables//', - '-//netscape comm. corp.//dtd html//', - '-//netscape comm. 
corp.//dtd strict html//', - "-//o'reilly and associates//dtd html 2.0//", - "-//o'reilly and associates//dtd html extended 1.0//", - "-//o'reilly and associates//dtd html extended relaxed 1.0//", - '-//sq//dtd html 2.0 hotmetal + extensions//', - '-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//', - '-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//', - '-//spyglass//dtd html 2.0 extended//', - '-//sun microsystems corp.//dtd hotjava html//', - '-//sun microsystems corp.//dtd hotjava strict html//', - '-//w3c//dtd html 3 1995-03-24//', - '-//w3c//dtd html 3.2 draft//', - '-//w3c//dtd html 3.2 final//', - '-//w3c//dtd html 3.2//', - '-//w3c//dtd html 3.2s draft//', - '-//w3c//dtd html 4.0 frameset//', - '-//w3c//dtd html 4.0 transitional//', - '-//w3c//dtd html experimental 19960712//', - '-//w3c//dtd html experimental 970421//', - '-//w3c//dtd w3 html//', - '-//w3o//dtd w3 html 3.0//', - '-//webtechs//dtd mozilla html 2.0//', - '-//webtechs//dtd mozilla html//' -]; + setTypeParser(oid, format, parseFn) { + return this._types.setTypeParser(oid, format, parseFn) + } -const QUIRKS_MODE_NO_SYSTEM_ID_PUBLIC_ID_PREFIXES = QUIRKS_MODE_PUBLIC_ID_PREFIXES.concat([ - '-//w3c//dtd html 4.01 frameset//', - '-//w3c//dtd html 4.01 transitional//' -]); + getTypeParser(oid, format) { + return this._types.getTypeParser(oid, format) + } -const QUIRKS_MODE_PUBLIC_IDS = ['-//w3o//dtd w3 html strict 3.0//en//', '-/w3c/dtd html 4.0 transitional/en', 'html']; -const LIMITED_QUIRKS_PUBLIC_ID_PREFIXES = ['-//w3c//dtd xhtml 1.0 frameset//', '-//w3c//dtd xhtml 1.0 transitional//']; + // Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c + escapeIdentifier(str) { + return '"' + str.replace(/"/g, '""') + '"' + } -const LIMITED_QUIRKS_WITH_SYSTEM_ID_PUBLIC_ID_PREFIXES = LIMITED_QUIRKS_PUBLIC_ID_PREFIXES.concat([ - '-//w3c//dtd html 4.01 frameset//', - '-//w3c//dtd html 4.01 transitional//' -]); + // Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c + escapeLiteral(str) { + var hasBackslash = false + var escaped = "'" -//Utils -function enquoteDoctypeId(id) { - const quote = id.indexOf('"') !== -1 ? 
"'" : '"'; + for (var i = 0; i < str.length; i++) { + var c = str[i] + if (c === "'") { + escaped += c + c + } else if (c === '\\') { + escaped += c + c + hasBackslash = true + } else { + escaped += c + } + } - return quote + id + quote; -} + escaped += "'" -function hasPrefix(publicId, prefixes) { - for (let i = 0; i < prefixes.length; i++) { - if (publicId.indexOf(prefixes[i]) === 0) { - return true; - } + if (hasBackslash === true) { + escaped = ' E' + escaped } - return false; -} + return escaped + } -//API -exports.isConforming = function(token) { - return ( - token.name === VALID_DOCTYPE_NAME && - token.publicId === null && - (token.systemId === null || token.systemId === VALID_SYSTEM_ID) - ); -}; + _pulseQueryQueue() { + if (this.readyForQuery === true) { + this.activeQuery = this.queryQueue.shift() + if (this.activeQuery) { + this.readyForQuery = false + this.hasExecuted = true -exports.getDocumentMode = function(token) { - if (token.name !== VALID_DOCTYPE_NAME) { - return DOCUMENT_MODE.QUIRKS; + const queryError = this.activeQuery.submit(this.connection) + if (queryError) { + process.nextTick(() => { + this.activeQuery.handleError(queryError, this.connection) + this.readyForQuery = true + this._pulseQueryQueue() + }) + } + } else if (this.hasExecuted) { + this.activeQuery = null + this.emit('drain') + } } + } - const systemId = token.systemId; + query(config, values, callback) { + // can take in strings, config object or query object + var query + var result + var readTimeout + var readTimeoutTimer + var queryCallback - if (systemId && systemId.toLowerCase() === QUIRKS_MODE_SYSTEM_ID) { - return DOCUMENT_MODE.QUIRKS; + if (config === null || config === undefined) { + throw new TypeError('Client was passed a null or undefined query') + } else if (typeof config.submit === 'function') { + readTimeout = config.query_timeout || this.connectionParameters.query_timeout + result = query = config + if (typeof values === 'function') { + query.callback = query.callback || values + } + } else { + readTimeout = this.connectionParameters.query_timeout + query = new Query(config, values, callback) + if (!query.callback) { + result = new this._Promise((resolve, reject) => { + query.callback = (err, res) => (err ? reject(err) : resolve(res)) + }) + } } - let publicId = token.publicId; + if (readTimeout) { + queryCallback = query.callback - if (publicId !== null) { - publicId = publicId.toLowerCase(); + readTimeoutTimer = setTimeout(() => { + var error = new Error('Query read timeout') - if (QUIRKS_MODE_PUBLIC_IDS.indexOf(publicId) > -1) { - return DOCUMENT_MODE.QUIRKS; - } + process.nextTick(() => { + query.handleError(error, this.connection) + }) - let prefixes = systemId === null ? QUIRKS_MODE_NO_SYSTEM_ID_PUBLIC_ID_PREFIXES : QUIRKS_MODE_PUBLIC_ID_PREFIXES; + queryCallback(error) - if (hasPrefix(publicId, prefixes)) { - return DOCUMENT_MODE.QUIRKS; + // we already returned an error, + // just do nothing if query completes + query.callback = () => {} + + // Remove from queue + var index = this.queryQueue.indexOf(query) + if (index > -1) { + this.queryQueue.splice(index, 1) } - prefixes = - systemId === null ? 
LIMITED_QUIRKS_PUBLIC_ID_PREFIXES : LIMITED_QUIRKS_WITH_SYSTEM_ID_PUBLIC_ID_PREFIXES; + this._pulseQueryQueue() + }, readTimeout) + + query.callback = (err, res) => { + clearTimeout(readTimeoutTimer) + queryCallback(err, res) + } + } + + if (this.binary && !query.binary) { + query.binary = true + } + + if (query._result && !query._result._types) { + query._result._types = this._types + } - if (hasPrefix(publicId, prefixes)) { - return DOCUMENT_MODE.LIMITED_QUIRKS; - } + if (!this._queryable) { + process.nextTick(() => { + query.handleError(new Error('Client has encountered a connection error and is not queryable'), this.connection) + }) + return result } - return DOCUMENT_MODE.NO_QUIRKS; -}; + if (this._ending) { + process.nextTick(() => { + query.handleError(new Error('Client was closed and is not queryable'), this.connection) + }) + return result + } -exports.serializeContent = function(name, publicId, systemId) { - let str = '!DOCTYPE '; + this.queryQueue.push(query) + this._pulseQueryQueue() + return result + } - if (name) { - str += name; + end(cb) { + this._ending = true + + // if we have never connected, then end is a noop, callback immediately + if (!this.connection._connecting) { + if (cb) { + cb() + } else { + return this._Promise.resolve() + } } - if (publicId) { - str += ' PUBLIC ' + enquoteDoctypeId(publicId); - } else if (systemId) { - str += ' SYSTEM'; + if (this.activeQuery || !this._queryable) { + // if we have an active query we need to force a disconnect + // on the socket - otherwise a hung query could block end forever + this.connection.stream.destroy() + } else { + this.connection.end() } - if (systemId !== null) { - str += ' ' + enquoteDoctypeId(systemId); + if (cb) { + this.connection.once('end', cb) + } else { + return new this._Promise((resolve) => { + this.connection.once('end', resolve) + }) } + } +} - return str; -}; +// expose a Query constructor +Client.Query = Query + +module.exports = Client /***/ }), -/***/ 69338: -/***/ ((__unused_webpack_module, exports) => { +/***/ 19112: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const NS = (exports.NAMESPACES = { - HTML: 'http://www.w3.org/1999/xhtml', - MATHML: 'http://www.w3.org/1998/Math/MathML', - SVG: 'http://www.w3.org/2000/svg', - XLINK: 'http://www.w3.org/1999/xlink', - XML: 'http://www.w3.org/XML/1998/namespace', - XMLNS: 'http://www.w3.org/2000/xmlns/' -}); - -exports.ATTRS = { - TYPE: 'type', - ACTION: 'action', - ENCODING: 'encoding', - PROMPT: 'prompt', - NAME: 'name', - COLOR: 'color', - FACE: 'face', - SIZE: 'size' -}; +var dns = __nccwpck_require__(40881) -exports.DOCUMENT_MODE = { - NO_QUIRKS: 'no-quirks', - QUIRKS: 'quirks', - LIMITED_QUIRKS: 'limited-quirks' -}; +var defaults = __nccwpck_require__(26419) -const $ = (exports.TAG_NAMES = { - A: 'a', - ADDRESS: 'address', - ANNOTATION_XML: 'annotation-xml', - APPLET: 'applet', - AREA: 'area', - ARTICLE: 'article', - ASIDE: 'aside', +var parse = __nccwpck_require__(38961).parse // parses a connection string - B: 'b', - BASE: 'base', - BASEFONT: 'basefont', - BGSOUND: 'bgsound', - BIG: 'big', - BLOCKQUOTE: 'blockquote', - BODY: 'body', - BR: 'br', - BUTTON: 'button', +var val = function (key, config, envVar) { + if (envVar === undefined) { + envVar = process.env['PG' + key.toUpperCase()] + } else if (envVar === false) { + // do nothing ... 
use false + } else { + envVar = process.env[envVar] + } - CAPTION: 'caption', - CENTER: 'center', - CODE: 'code', - COL: 'col', - COLGROUP: 'colgroup', + return config[key] || envVar || defaults[key] +} - DD: 'dd', - DESC: 'desc', - DETAILS: 'details', - DIALOG: 'dialog', - DIR: 'dir', - DIV: 'div', - DL: 'dl', - DT: 'dt', +var readSSLConfigFromEnvironment = function () { + switch (process.env.PGSSLMODE) { + case 'disable': + return false + case 'prefer': + case 'require': + case 'verify-ca': + case 'verify-full': + return true + case 'no-verify': + return { rejectUnauthorized: false } + } + return defaults.ssl +} - EM: 'em', - EMBED: 'embed', +// Convert arg to a string, surround in single quotes, and escape single quotes and backslashes +var quoteParamValue = function (value) { + return "'" + ('' + value).replace(/\\/g, '\\\\').replace(/'/g, "\\'") + "'" +} - FIELDSET: 'fieldset', - FIGCAPTION: 'figcaption', - FIGURE: 'figure', - FONT: 'font', - FOOTER: 'footer', - FOREIGN_OBJECT: 'foreignObject', - FORM: 'form', - FRAME: 'frame', - FRAMESET: 'frameset', +var add = function (params, config, paramName) { + var value = config[paramName] + if (value !== undefined && value !== null) { + params.push(paramName + '=' + quoteParamValue(value)) + } +} - H1: 'h1', - H2: 'h2', - H3: 'h3', - H4: 'h4', - H5: 'h5', - H6: 'h6', - HEAD: 'head', - HEADER: 'header', - HGROUP: 'hgroup', - HR: 'hr', - HTML: 'html', +class ConnectionParameters { + constructor(config) { + // if a string is passed, it is a raw connection string so we parse it into a config + config = typeof config === 'string' ? parse(config) : config || {} - I: 'i', - IMG: 'img', - IMAGE: 'image', - INPUT: 'input', - IFRAME: 'iframe', + // if the config has a connectionString defined, parse IT into the config we use + // this will override other default values with what is stored in connectionString + if (config.connectionString) { + config = Object.assign({}, config, parse(config.connectionString)) + } - KEYGEN: 'keygen', + this.user = val('user', config) + this.database = val('database', config) - LABEL: 'label', - LI: 'li', - LINK: 'link', - LISTING: 'listing', + if (this.database === undefined) { + this.database = this.user + } - MAIN: 'main', - MALIGNMARK: 'malignmark', - MARQUEE: 'marquee', - MATH: 'math', - MENU: 'menu', - META: 'meta', - MGLYPH: 'mglyph', - MI: 'mi', - MO: 'mo', - MN: 'mn', - MS: 'ms', - MTEXT: 'mtext', + this.port = parseInt(val('port', config), 10) + this.host = val('host', config) - NAV: 'nav', - NOBR: 'nobr', - NOFRAMES: 'noframes', - NOEMBED: 'noembed', - NOSCRIPT: 'noscript', + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: val('password', config), + }) - OBJECT: 'object', - OL: 'ol', - OPTGROUP: 'optgroup', - OPTION: 'option', + this.binary = val('binary', config) + this.options = val('options', config) - P: 'p', - PARAM: 'param', - PLAINTEXT: 'plaintext', - PRE: 'pre', + this.ssl = typeof config.ssl === 'undefined' ? 
readSSLConfigFromEnvironment() : config.ssl - RB: 'rb', - RP: 'rp', - RT: 'rt', - RTC: 'rtc', - RUBY: 'ruby', + if (typeof this.ssl === 'string') { + if (this.ssl === 'true') { + this.ssl = true + } + } + // support passing in ssl=no-verify via connection string + if (this.ssl === 'no-verify') { + this.ssl = { rejectUnauthorized: false } + } + if (this.ssl && this.ssl.key) { + Object.defineProperty(this.ssl, 'key', { + enumerable: false, + }) + } - S: 's', - SCRIPT: 'script', - SECTION: 'section', - SELECT: 'select', - SOURCE: 'source', - SMALL: 'small', - SPAN: 'span', - STRIKE: 'strike', - STRONG: 'strong', - STYLE: 'style', - SUB: 'sub', - SUMMARY: 'summary', - SUP: 'sup', + this.client_encoding = val('client_encoding', config) + this.replication = val('replication', config) + // a domain socket begins with '/' + this.isDomainSocket = !(this.host || '').indexOf('/') - TABLE: 'table', - TBODY: 'tbody', - TEMPLATE: 'template', - TEXTAREA: 'textarea', - TFOOT: 'tfoot', - TD: 'td', - TH: 'th', - THEAD: 'thead', - TITLE: 'title', - TR: 'tr', - TRACK: 'track', - TT: 'tt', + this.application_name = val('application_name', config, 'PGAPPNAME') + this.fallback_application_name = val('fallback_application_name', config, false) + this.statement_timeout = val('statement_timeout', config, false) + this.idle_in_transaction_session_timeout = val('idle_in_transaction_session_timeout', config, false) + this.query_timeout = val('query_timeout', config, false) - U: 'u', - UL: 'ul', + if (config.connectionTimeoutMillis === undefined) { + this.connect_timeout = process.env.PGCONNECT_TIMEOUT || 0 + } else { + this.connect_timeout = Math.floor(config.connectionTimeoutMillis / 1000) + } - SVG: 'svg', + if (config.keepAlive === false) { + this.keepalives = 0 + } else if (config.keepAlive === true) { + this.keepalives = 1 + } - VAR: 'var', + if (typeof config.keepAliveInitialDelayMillis === 'number') { + this.keepalives_idle = Math.floor(config.keepAliveInitialDelayMillis / 1000) + } + } - WBR: 'wbr', + getLibpqConnectionString(cb) { + var params = [] + add(params, this, 'user') + add(params, this, 'password') + add(params, this, 'port') + add(params, this, 'application_name') + add(params, this, 'fallback_application_name') + add(params, this, 'connect_timeout') + add(params, this, 'options') - XMP: 'xmp' -}); + var ssl = typeof this.ssl === 'object' ? this.ssl : this.ssl ? 
{ sslmode: this.ssl } : {} + add(params, ssl, 'sslmode') + add(params, ssl, 'sslca') + add(params, ssl, 'sslkey') + add(params, ssl, 'sslcert') + add(params, ssl, 'sslrootcert') -exports.SPECIAL_ELEMENTS = { - [NS.HTML]: { - [$.ADDRESS]: true, - [$.APPLET]: true, - [$.AREA]: true, - [$.ARTICLE]: true, - [$.ASIDE]: true, - [$.BASE]: true, - [$.BASEFONT]: true, - [$.BGSOUND]: true, - [$.BLOCKQUOTE]: true, - [$.BODY]: true, - [$.BR]: true, - [$.BUTTON]: true, - [$.CAPTION]: true, - [$.CENTER]: true, - [$.COL]: true, - [$.COLGROUP]: true, - [$.DD]: true, - [$.DETAILS]: true, - [$.DIR]: true, - [$.DIV]: true, - [$.DL]: true, - [$.DT]: true, - [$.EMBED]: true, - [$.FIELDSET]: true, - [$.FIGCAPTION]: true, - [$.FIGURE]: true, - [$.FOOTER]: true, - [$.FORM]: true, - [$.FRAME]: true, - [$.FRAMESET]: true, - [$.H1]: true, - [$.H2]: true, - [$.H3]: true, - [$.H4]: true, - [$.H5]: true, - [$.H6]: true, - [$.HEAD]: true, - [$.HEADER]: true, - [$.HGROUP]: true, - [$.HR]: true, - [$.HTML]: true, - [$.IFRAME]: true, - [$.IMG]: true, - [$.INPUT]: true, - [$.LI]: true, - [$.LINK]: true, - [$.LISTING]: true, - [$.MAIN]: true, - [$.MARQUEE]: true, - [$.MENU]: true, - [$.META]: true, - [$.NAV]: true, - [$.NOEMBED]: true, - [$.NOFRAMES]: true, - [$.NOSCRIPT]: true, - [$.OBJECT]: true, - [$.OL]: true, - [$.P]: true, - [$.PARAM]: true, - [$.PLAINTEXT]: true, - [$.PRE]: true, - [$.SCRIPT]: true, - [$.SECTION]: true, - [$.SELECT]: true, - [$.SOURCE]: true, - [$.STYLE]: true, - [$.SUMMARY]: true, - [$.TABLE]: true, - [$.TBODY]: true, - [$.TD]: true, - [$.TEMPLATE]: true, - [$.TEXTAREA]: true, - [$.TFOOT]: true, - [$.TH]: true, - [$.THEAD]: true, - [$.TITLE]: true, - [$.TR]: true, - [$.TRACK]: true, - [$.UL]: true, - [$.WBR]: true, - [$.XMP]: true - }, - [NS.MATHML]: { - [$.MI]: true, - [$.MO]: true, - [$.MN]: true, - [$.MS]: true, - [$.MTEXT]: true, - [$.ANNOTATION_XML]: true - }, - [NS.SVG]: { - [$.TITLE]: true, - [$.FOREIGN_OBJECT]: true, - [$.DESC]: true + if (this.database) { + params.push('dbname=' + quoteParamValue(this.database)) } -}; + if (this.replication) { + params.push('replication=' + quoteParamValue(this.replication)) + } + if (this.host) { + params.push('host=' + quoteParamValue(this.host)) + } + if (this.isDomainSocket) { + return cb(null, params.join(' ')) + } + if (this.client_encoding) { + params.push('client_encoding=' + quoteParamValue(this.client_encoding)) + } + dns.lookup(this.host, function (err, address) { + if (err) return cb(err, null) + params.push('hostaddr=' + quoteParamValue(address)) + return cb(null, params.join(' ')) + }) + } +} + +module.exports = ConnectionParameters /***/ }), -/***/ 38714: -/***/ ((module) => { +/***/ 62848: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -function posix(path) { - return path.charAt(0) === '/'; -} - -function win32(path) { - // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 - var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - var result = splitDeviceRe.exec(path); - var device = result[1] || ''; - var isUnc = Boolean(device && device.charAt(1) !== ':'); - - // UNC paths are always absolute - return Boolean(result[2] || isUnc); -} - -module.exports = process.platform === 'win32' ? 
win32 : posix; -module.exports.posix = posix; -module.exports.win32 = win32; - - -/***/ }), +var net = __nccwpck_require__(11631) +var EventEmitter = __nccwpck_require__(28614).EventEmitter +var util = __nccwpck_require__(31669) -/***/ 85644: -/***/ (function(module) { +const { parse, serialize } = __nccwpck_require__(21113) -// Generated by CoffeeScript 1.12.2 -(function() { - var getNanoSeconds, hrtime, loadTime, moduleLoadTime, nodeLoadTime, upTime; +const flushBuffer = serialize.flush() +const syncBuffer = serialize.sync() +const endBuffer = serialize.end() - if ((typeof performance !== "undefined" && performance !== null) && performance.now) { - module.exports = function() { - return performance.now(); - }; - } else if ((typeof process !== "undefined" && process !== null) && process.hrtime) { - module.exports = function() { - return (getNanoSeconds() - nodeLoadTime) / 1e6; - }; - hrtime = process.hrtime; - getNanoSeconds = function() { - var hr; - hr = hrtime(); - return hr[0] * 1e9 + hr[1]; - }; - moduleLoadTime = getNanoSeconds(); - upTime = process.uptime() * 1e9; - nodeLoadTime = moduleLoadTime - upTime; - } else if (Date.now) { - module.exports = function() { - return Date.now() - loadTime; - }; - loadTime = Date.now(); - } else { - module.exports = function() { - return new Date().getTime() - loadTime; - }; - loadTime = new Date().getTime(); +// TODO(bmc) support binary mode at some point +class Connection extends EventEmitter { + constructor(config) { + super() + config = config || {} + this.stream = config.stream || new net.Socket() + this._keepAlive = config.keepAlive + this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis + this.lastBuffer = false + this.parsedStatements = {} + this.ssl = config.ssl || false + this._ending = false + this._emitMessage = false + var self = this + this.on('newListener', function (eventName) { + if (eventName === 'message') { + self._emitMessage = true + } + }) } -}).call(this); + connect(port, host) { + var self = this -//# sourceMappingURL=performance-now.js.map + this._connecting = true + this.stream.setNoDelay(true) + this.stream.connect(port, host) + + this.stream.once('connect', function () { + if (self._keepAlive) { + self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis) + } + self.emit('connect') + }) + const reportStreamError = function (error) { + // errors about disconnections should be ignored during disconnect + if (self._ending && (error.code === 'ECONNRESET' || error.code === 'EPIPE')) { + return + } + self.emit('error', error) + } + this.stream.on('error', reportStreamError) -/***/ }), + this.stream.on('close', function () { + self.emit('end') + }) -/***/ 38961: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!this.ssl) { + return this.attachListeners(this.stream) + } -"use strict"; + this.stream.once('data', function (buffer) { + var responseCode = buffer.toString('utf8') + switch (responseCode) { + case 'S': // Server supports SSL connections, continue with a secure connection + break + case 'N': // Server does not support SSL connections + self.stream.end() + return self.emit('error', new Error('The server does not support SSL connections')) + default: + // Any other response byte, including 'E' (ErrorResponse) indicating a server error + self.stream.end() + return self.emit('error', new Error('There was an error establishing an SSL connection')) + } + var tls = __nccwpck_require__(4016) + const options = { + socket: self.stream, + } + if (self.ssl !== true) { + 
Object.assign(options, self.ssl) -var url = __nccwpck_require__(78835) -var fs = __nccwpck_require__(35747) + if ('key' in self.ssl) { + options.key = self.ssl.key + } + } -//Parse method copied from https://github.com/brianc/node-postgres -//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com) -//MIT License + if (net.isIP(host) === 0) { + options.servername = host + } + try { + self.stream = tls.connect(options) + } catch (err) { + return self.emit('error', err) + } + self.attachListeners(self.stream) + self.stream.on('error', reportStreamError) -//parses a connection string -function parse(str) { - //unix socket - if (str.charAt(0) === '/') { - var config = str.split(' ') - return { host: config[0], database: config[1] } + self.emit('sslconnect') + }) } - // url parse expects spaces encoded as %20 - var result = url.parse( - / |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str) ? encodeURI(str).replace(/\%25(\d\d)/g, '%$1') : str, - true - ) - var config = result.query - for (var k in config) { - if (Array.isArray(config[k])) { - config[k] = config[k][config[k].length - 1] - } + attachListeners(stream) { + stream.on('end', () => { + this.emit('end') + }) + parse(stream, (msg) => { + var eventName = msg.name === 'error' ? 'errorMessage' : msg.name + if (this._emitMessage) { + this.emit('message', msg) + } + this.emit(eventName, msg) + }) } - var auth = (result.auth || ':').split(':') - config.user = auth[0] - config.password = auth.splice(1).join(':') + requestSsl() { + this.stream.write(serialize.requestSsl()) + } - config.port = result.port - if (result.protocol == 'socket:') { - config.host = decodeURI(result.pathname) - config.database = result.query.db - config.client_encoding = result.query.encoding - return config + startup(config) { + this.stream.write(serialize.startup(config)) } - if (!config.host) { - // Only set the host if there is no equivalent query param. - config.host = result.hostname + + cancel(processID, secretKey) { + this._send(serialize.cancel(processID, secretKey)) } - // If the host is missing it might be a URL-encoded path to a socket. - var pathname = result.pathname - if (!config.host && pathname && /^%2f/i.test(pathname)) { - var pathnameSplit = pathname.split('/') - config.host = decodeURIComponent(pathnameSplit[0]) - pathname = pathnameSplit.splice(1).join('/') + password(password) { + this._send(serialize.password(password)) } - // result.pathname is not always guaranteed to have a '/' prefix (e.g. relative urls) - // only strip the slash if it is present. 
- if (pathname && pathname.charAt(0) === '/') { - pathname = pathname.slice(1) || null + + sendSASLInitialResponseMessage(mechanism, initialResponse) { + this._send(serialize.sendSASLInitialResponseMessage(mechanism, initialResponse)) } - config.database = pathname && decodeURI(pathname) - if (config.ssl === 'true' || config.ssl === '1') { - config.ssl = true + sendSCRAMClientFinalMessage(additionalData) { + this._send(serialize.sendSCRAMClientFinalMessage(additionalData)) } - if (config.ssl === '0') { - config.ssl = false + _send(buffer) { + if (!this.stream.writable) { + return false + } + return this.stream.write(buffer) } - if (config.sslcert || config.sslkey || config.sslrootcert || config.sslmode) { - config.ssl = {} + query(text) { + this._send(serialize.query(text)) } - if (config.sslcert) { - config.ssl.cert = fs.readFileSync(config.sslcert).toString() + // send parse message + parse(query) { + this._send(serialize.parse(query)) } - if (config.sslkey) { - config.ssl.key = fs.readFileSync(config.sslkey).toString() + // send bind message + bind(config) { + this._send(serialize.bind(config)) } - if (config.sslrootcert) { - config.ssl.ca = fs.readFileSync(config.sslrootcert).toString() + // send execute message + execute(config) { + this._send(serialize.execute(config)) } - switch (config.sslmode) { - case 'disable': { - config.ssl = false - break - } - case 'prefer': - case 'require': - case 'verify-ca': - case 'verify-full': { - break - } - case 'no-verify': { - config.ssl.rejectUnauthorized = false - break + flush() { + if (this.stream.writable) { + this.stream.write(flushBuffer) } } - return config -} - -module.exports = parse - -parse.parse = parse - - -/***/ }), - -/***/ 41180: -/***/ ((module) => { - -"use strict"; - - -// selected so (BASE - 1) * 0x100000000 + 0xffffffff is a safe integer -var BASE = 1000000; - -function readInt8(buffer) { - var high = buffer.readInt32BE(0); - var low = buffer.readUInt32BE(4); - var sign = ''; - - if (high < 0) { - high = ~high + (low === 0); - low = (~low + 1) >>> 0; - sign = '-'; - } - - var result = ''; - var carry; - var t; - var digits; - var pad; - var l; - var i; + sync() { + this._ending = true + this._send(flushBuffer) + this._send(syncBuffer) + } - { - carry = high % BASE; - high = high / BASE >>> 0; + end() { + // 0x58 = 'X' + this._ending = true + if (!this._connecting || !this.stream.writable) { + this.stream.end() + return + } + return this.stream.write(endBuffer, () => { + this.stream.end() + }) + } - t = 0x100000000 * carry + low; - low = t / BASE >>> 0; - digits = '' + (t - BASE * low); + close(msg) { + this._send(serialize.close(msg)) + } - if (low === 0 && high === 0) { - return sign + digits + result; - } + describe(msg) { + this._send(serialize.describe(msg)) + } - pad = ''; - l = 6 - digits.length; + sendCopyFromChunk(chunk) { + this._send(serialize.copyData(chunk)) + } - for (i = 0; i < l; i++) { - pad += '0'; - } + endCopyFrom() { + this._send(serialize.copyDone()) + } - result = pad + digits + result; - } + sendCopyFail(msg) { + this._send(serialize.copyFail(msg)) + } +} - { - carry = high % BASE; - high = high / BASE >>> 0; +module.exports = Connection - t = 0x100000000 * carry + low; - low = t / BASE >>> 0; - digits = '' + (t - BASE * low); - if (low === 0 && high === 0) { - return sign + digits + result; - } +/***/ }), - pad = ''; - l = 6 - digits.length; +/***/ 26419: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - for (i = 0; i < l; i++) { - pad += '0'; - } +"use strict"; - result = pad + 
digits + result; - } - { - carry = high % BASE; - high = high / BASE >>> 0; +module.exports = { + // database host. defaults to localhost + host: 'localhost', - t = 0x100000000 * carry + low; - low = t / BASE >>> 0; - digits = '' + (t - BASE * low); + // database user's name + user: process.platform === 'win32' ? process.env.USERNAME : process.env.USER, - if (low === 0 && high === 0) { - return sign + digits + result; - } + // name of database to connect + database: undefined, - pad = ''; - l = 6 - digits.length; + // database user's password + password: null, - for (i = 0; i < l; i++) { - pad += '0'; - } + // a Postgres connection string to be used instead of setting individual connection items + // NOTE: Setting this value will cause it to override any other value (such as database or user) defined + // in the defaults object. + connectionString: undefined, - result = pad + digits + result; - } + // database port + port: 5432, - { - carry = high % BASE; - t = 0x100000000 * carry + low; - digits = '' + t % BASE; + // number of rows to return at a time from a prepared statement's + // portal. 0 will return all rows at once + rows: 0, - return sign + digits + result; - } -} + // binary result mode + binary: false, -module.exports = readInt8; + // Connection pool options - see https://github.com/brianc/node-pg-pool + // number of connections to use in connection pool + // 0 will disable connection pooling + max: 10, -/***/ }), + // max milliseconds a client can go unused before it is removed + // from the pool and destroyed + idleTimeoutMillis: 30000, -/***/ 99599: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + client_encoding: '', -"use strict"; + ssl: false, -const EventEmitter = __nccwpck_require__(28614).EventEmitter + application_name: undefined, -const NOOP = function () {} + fallback_application_name: undefined, -const removeWhere = (list, predicate) => { - const i = list.findIndex(predicate) + options: undefined, - return i === -1 ? undefined : list.splice(i, 1)[0] -} + parseInputDatesAsUTC: false, -class IdleItem { - constructor(client, idleListener, timeoutId) { - this.client = client - this.idleListener = idleListener - this.timeoutId = timeoutId - } -} + // max milliseconds any query using this connection will execute for before timing out in error. + // false=unlimited + statement_timeout: false, -class PendingItem { - constructor(callback) { - this.callback = callback - } -} + // Terminate any session with an open transaction that has been idle for longer than the specified duration in milliseconds + // false=unlimited + idle_in_transaction_session_timeout: false, -function throwOnDoubleRelease() { - throw new Error('Release called on client which has already been released to the pool.') -} + // max milliseconds to wait for query to complete (client side) + query_timeout: false, -function promisify(Promise, callback) { - if (callback) { - return { callback: callback, result: undefined } - } - let rej - let res - const cb = function (err, client) { - err ? 
rej(err) : res(client) - } - const result = new Promise(function (resolve, reject) { - res = resolve - rej = reject - }) - return { callback: cb, result: result } -} + connect_timeout: 0, -function makeIdleListener(pool, client) { - return function idleListener(err) { - err.client = client + keepalives: 1, - client.removeListener('error', idleListener) - client.on('error', () => { - pool.log('additional client error after disconnection due to error', err) - }) - pool._remove(client) - // TODO - document that once the pool emits an error - // the client has already been closed & purged and is unusable - pool.emit('error', err, client) - } + keepalives_idle: 0, } -class Pool extends EventEmitter { - constructor(options, Client) { - super() - this.options = Object.assign({}, options) - - if (options != null && 'password' in options) { - // "hiding" the password so it doesn't show up in stack traces - // or if the client is console.logged - Object.defineProperty(this.options, 'password', { - configurable: true, - enumerable: false, - writable: true, - value: options.password, - }) - } - if (options != null && options.ssl && options.ssl.key) { - // "hiding" the ssl->key so it doesn't show up in stack traces - // or if the client is console.logged - Object.defineProperty(this.options.ssl, 'key', { - enumerable: false, - }) - } - - this.options.max = this.options.max || this.options.poolSize || 10 - this.options.maxUses = this.options.maxUses || Infinity - this.log = this.options.log || function () {} - this.Client = this.options.Client || Client || __nccwpck_require__(44194).Client - this.Promise = this.options.Promise || global.Promise - - if (typeof this.options.idleTimeoutMillis === 'undefined') { - this.options.idleTimeoutMillis = 10000 - } - - this._clients = [] - this._idle = [] - this._pendingQueue = [] - this._endCallback = undefined - this.ending = false - this.ended = false - } - - _isFull() { - return this._clients.length >= this.options.max - } - - _pulseQueue() { - this.log('pulse queue') - if (this.ended) { - this.log('pulse queue ended') - return - } - if (this.ending) { - this.log('pulse queue on ending') - if (this._idle.length) { - this._idle.slice().map((item) => { - this._remove(item.client) - }) - } - if (!this._clients.length) { - this.ended = true - this._endCallback() - } - return - } - // if we don't have any waiting, do nothing - if (!this._pendingQueue.length) { - this.log('no queued requests') - return - } - // if we don't have any idle clients and we have no more room do nothing - if (!this._idle.length && this._isFull()) { - return - } - const pendingItem = this._pendingQueue.shift() - if (this._idle.length) { - const idleItem = this._idle.pop() - clearTimeout(idleItem.timeoutId) - const client = idleItem.client - const idleListener = idleItem.idleListener - - return this._acquireClient(client, pendingItem, idleListener, false) - } - if (!this._isFull()) { - return this.newClient(pendingItem) - } - throw new Error('unexpected condition') - } +var pgTypes = __nccwpck_require__(77929) +// save default parsers +var parseBigInteger = pgTypes.getTypeParser(20, 'text') +var parseBigIntegerArray = pgTypes.getTypeParser(1016, 'text') - _remove(client) { - const removed = removeWhere(this._idle, (item) => item.client === client) +// parse int8 so you can get your count values as actual numbers +module.exports.__defineSetter__('parseInt8', function (val) { + pgTypes.setTypeParser(20, 'text', val ? 
pgTypes.getTypeParser(23, 'text') : parseBigInteger) + pgTypes.setTypeParser(1016, 'text', val ? pgTypes.getTypeParser(1007, 'text') : parseBigIntegerArray) +}) - if (removed !== undefined) { - clearTimeout(removed.timeoutId) - } - this._clients = this._clients.filter((c) => c !== client) - client.end() - this.emit('remove', client) - } +/***/ }), - connect(cb) { - if (this.ending) { - const err = new Error('Cannot use a pool after calling end on the pool') - return cb ? cb(err) : this.Promise.reject(err) +/***/ 44194: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Client = __nccwpck_require__(37143) +var defaults = __nccwpck_require__(26419) +var Connection = __nccwpck_require__(62848) +var Pool = __nccwpck_require__(99599) + +const poolFactory = (Client) => { + return class BoundPool extends Pool { + constructor(options) { + super(options, Client) } + } +} - const response = promisify(this.Promise, cb) - const result = response.result +var PG = function (clientConstructor) { + this.defaults = defaults + this.Client = clientConstructor + this.Query = this.Client.Query + this.Pool = poolFactory(this.Client) + this._pools = [] + this.Connection = Connection + this.types = __nccwpck_require__(77929) +} - // if we don't have to connect a new client, don't do so - if (this._clients.length >= this.options.max || this._idle.length) { - // if we have idle clients schedule a pulse immediately - if (this._idle.length) { - process.nextTick(() => this._pulseQueue()) - } +if (typeof process.env.NODE_PG_FORCE_NATIVE !== 'undefined') { + module.exports = new PG(__nccwpck_require__(43148)) +} else { + module.exports = new PG(Client) - if (!this.options.connectionTimeoutMillis) { - this._pendingQueue.push(new PendingItem(response.callback)) - return result + // lazy require native module...the native module may not have installed + Object.defineProperty(module.exports, "native", ({ + configurable: true, + enumerable: false, + get() { + var native = null + try { + native = new PG(__nccwpck_require__(43148)) + } catch (err) { + if (err.code !== 'MODULE_NOT_FOUND') { + throw err + } } - const queueCallback = (err, res, done) => { - clearTimeout(tid) - response.callback(err, res, done) - } + // overwrite module.exports.native so that getter is never called again + Object.defineProperty(module.exports, "native", ({ + value: native, + })) - const pendingItem = new PendingItem(queueCallback) + return native + }, + })) +} - // set connection timeout on checking out an existing client - const tid = setTimeout(() => { - // remove the callback from pending waiters because - // we're going to call it with a timeout error - removeWhere(this._pendingQueue, (i) => i.callback === queueCallback) - pendingItem.timedOut = true - response.callback(new Error('timeout exceeded when trying to connect')) - }, this.options.connectionTimeoutMillis) - this._pendingQueue.push(pendingItem) - return result - } +/***/ }), - this.newClient(new PendingItem(response.callback)) +/***/ 1094: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return result - } +"use strict"; - newClient(pendingItem) { - const client = new this.Client(this.options) - this._clients.push(client) - const idleListener = makeIdleListener(this, client) - this.log('checking client timeout') +// eslint-disable-next-line +var Native = __nccwpck_require__(38889) +var TypeOverrides = __nccwpck_require__(78873) +var pkg = __nccwpck_require__(60257) +var EventEmitter = 
__nccwpck_require__(28614).EventEmitter +var util = __nccwpck_require__(31669) +var ConnectionParameters = __nccwpck_require__(19112) - // connection timeout logic - let tid - let timeoutHit = false - if (this.options.connectionTimeoutMillis) { - tid = setTimeout(() => { - this.log('ending client due to timeout') - timeoutHit = true - // force kill the node driver, and let libpq do its teardown - client.connection ? client.connection.stream.destroy() : client.end() - }, this.options.connectionTimeoutMillis) - } +var NativeQuery = __nccwpck_require__(91886) - this.log('connecting new client') - client.connect((err) => { - if (tid) { - clearTimeout(tid) - } - client.on('error', idleListener) - if (err) { - this.log('client failed to connect', err) - // remove the dead client from our list of clients - this._clients = this._clients.filter((c) => c !== client) - if (timeoutHit) { - err.message = 'Connection terminated due to connection timeout' - } +var Client = (module.exports = function (config) { + EventEmitter.call(this) + config = config || {} - // this client won’t be released, so move on immediately - this._pulseQueue() + this._Promise = config.Promise || global.Promise + this._types = new TypeOverrides(config.types) - if (!pendingItem.timedOut) { - pendingItem.callback(err, undefined, NOOP) - } - } else { - this.log('new client connected') + this.native = new Native({ + types: this._types, + }) - return this._acquireClient(client, pendingItem, idleListener, true) - } + this._queryQueue = [] + this._ending = false + this._connecting = false + this._connected = false + this._queryable = true + + // keep these on the object for legacy reasons + // for the time being. TODO: deprecate all this jazz + var cp = (this.connectionParameters = new ConnectionParameters(config)) + this.user = cp.user + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: cp.password, + }) + this.database = cp.database + this.host = cp.host + this.port = cp.port + + // a hash to hold named queries + this.namedQueries = {} +}) + +Client.Query = NativeQuery + +util.inherits(Client, EventEmitter) + +Client.prototype._errorAllQueries = function (err) { + const enqueueError = (query) => { + process.nextTick(() => { + query.native = this.native + query.handleError(err) }) } - // acquire a client for a pending work item - _acquireClient(client, pendingItem, idleListener, isNew) { - if (isNew) { - this.emit('connect', client) - } + if (this._hasActiveQuery()) { + enqueueError(this._activeQuery) + this._activeQuery = null + } - this.emit('acquire', client) + this._queryQueue.forEach(enqueueError) + this._queryQueue.length = 0 +} - client.release = this._releaseOnce(client, idleListener) +// connect to the backend +// pass an optional callback to be called once connected +// or with an error if there was a connection error +Client.prototype._connect = function (cb) { + var self = this - client.removeListener('error', idleListener) + if (this._connecting) { + process.nextTick(() => cb(new Error('Client has already been connected. 
You cannot reuse a client.'))) + return + } - if (!pendingItem.timedOut) { - if (isNew && this.options.verify) { - this.options.verify(client, (err) => { - if (err) { - client.release(err) - return pendingItem.callback(err, undefined, NOOP) - } + this._connecting = true - pendingItem.callback(undefined, client, client.release) - }) - } else { - pendingItem.callback(undefined, client, client.release) - } - } else { - if (isNew && this.options.verify) { - this.options.verify(client, client.release) - } else { - client.release() + this.connectionParameters.getLibpqConnectionString(function (err, conString) { + if (err) return cb(err) + self.native.connect(conString, function (err) { + if (err) { + self.native.end() + return cb(err) } - } - } - // returns a function that wraps _release and throws if called more than once - _releaseOnce(client, idleListener) { - let released = false + // set internal states to connected + self._connected = true - return (err) => { - if (released) { - throwOnDoubleRelease() - } + // handle connection errors from the native layer + self.native.on('error', function (err) { + self._queryable = false + self._errorAllQueries(err) + self.emit('error', err) + }) - released = true - this._release(client, idleListener, err) - } - } + self.native.on('notification', function (msg) { + self.emit('notification', { + channel: msg.relname, + payload: msg.extra, + }) + }) - // release a client back to the poll, include an error - // to remove it from the pool - _release(client, idleListener, err) { - client.on('error', idleListener) + // signal we are connected now + self.emit('connect') + self._pulseQueryQueue(true) - client._poolUseCount = (client._poolUseCount || 0) + 1 + cb() + }) + }) +} - // TODO(bmc): expose a proper, public interface _queryable and _ending - if (err || this.ending || !client._queryable || client._ending || client._poolUseCount >= this.options.maxUses) { - if (client._poolUseCount >= this.options.maxUses) { - this.log('remove expended client') +Client.prototype.connect = function (callback) { + if (callback) { + this._connect(callback) + return + } + + return new this._Promise((resolve, reject) => { + this._connect((error) => { + if (error) { + reject(error) + } else { + resolve() } - this._remove(client) - this._pulseQueue() - return - } + }) + }) +} - // idle timeout - let tid - if (this.options.idleTimeoutMillis) { - tid = setTimeout(() => { - this.log('remove idle client') - this._remove(client) - }, this.options.idleTimeoutMillis) - } +// send a query to the server +// this method is highly overloaded to take +// 1) string query, optional array of parameters, optional function callback +// 2) object query with { +// string query +// optional array values, +// optional function callback instead of as a separate parameter +// optional string name to name & cache the query plan +// optional string rowMode = 'array' for an array of results +// } +Client.prototype.query = function (config, values, callback) { + var query + var result + var readTimeout + var readTimeoutTimer + var queryCallback - this._idle.push(new IdleItem(client, idleListener, tid)) - this._pulseQueue() + if (config === null || config === undefined) { + throw new TypeError('Client was passed a null or undefined query') + } else if (typeof config.submit === 'function') { + readTimeout = config.query_timeout || this.connectionParameters.query_timeout + result = query = config + // accept query(new Query(...), (err, res) => { }) style + if (typeof values === 'function') { + 
config.callback = values + } + } else { + readTimeout = this.connectionParameters.query_timeout + query = new NativeQuery(config, values, callback) + if (!query.callback) { + let resolveOut, rejectOut + result = new this._Promise((resolve, reject) => { + resolveOut = resolve + rejectOut = reject + }) + query.callback = (err, res) => (err ? rejectOut(err) : resolveOut(res)) + } } - query(text, values, cb) { - // guard clause against passing a function as the first parameter - if (typeof text === 'function') { - const response = promisify(this.Promise, text) - setImmediate(function () { - return response.callback(new Error('Passing a function as the first parameter to pool.query is not supported')) + if (readTimeout) { + queryCallback = query.callback + + readTimeoutTimer = setTimeout(() => { + var error = new Error('Query read timeout') + + process.nextTick(() => { + query.handleError(error, this.connection) }) - return response.result - } - // allow plain text query without values - if (typeof values === 'function') { - cb = values - values = undefined - } - const response = promisify(this.Promise, cb) - cb = response.callback + queryCallback(error) - this.connect((err, client) => { - if (err) { - return cb(err) - } + // we already returned an error, + // just do nothing if query completes + query.callback = () => {} - let clientReleased = false - const onError = (err) => { - if (clientReleased) { - return - } - clientReleased = true - client.release(err) - cb(err) + // Remove from queue + var index = this._queryQueue.indexOf(query) + if (index > -1) { + this._queryQueue.splice(index, 1) } - client.once('error', onError) - this.log('dispatching query') - client.query(text, values, (err, res) => { - this.log('query dispatched') - client.removeListener('error', onError) - if (clientReleased) { - return - } - clientReleased = true - client.release(err) - if (err) { - return cb(err) - } else { - return cb(undefined, res) - } - }) - }) - return response.result - } + this._pulseQueryQueue() + }, readTimeout) - end(cb) { - this.log('ending') - if (this.ending) { - const err = new Error('Called end on pool more than once') - return cb ? cb(err) : this.Promise.reject(err) + query.callback = (err, res) => { + clearTimeout(readTimeoutTimer) + queryCallback(err, res) } - this.ending = true - const promised = promisify(this.Promise, cb) - this._endCallback = promised.callback - this._pulseQueue() - return promised.result } - get waitingCount() { - return this._pendingQueue.length + if (!this._queryable) { + query.native = this.native + process.nextTick(() => { + query.handleError(new Error('Client has encountered a connection error and is not queryable')) + }) + return result } - get idleCount() { - return this._idle.length + if (this._ending) { + query.native = this.native + process.nextTick(() => { + query.handleError(new Error('Client was closed and is not queryable')) + }) + return result } - get totalCount() { - return this._clients.length - } + this._queryQueue.push(query) + this._pulseQueryQueue() + return result } -module.exports = Pool +// disconnect from the backend server +Client.prototype.end = function (cb) { + var self = this -/***/ }), + this._ending = true -/***/ 10320: -/***/ ((__unused_webpack_module, exports) => { + if (!this._connected) { + this.once('connect', this.end.bind(this, cb)) + } + var result + if (!cb) { + result = new this._Promise(function (resolve, reject) { + cb = (err) => (err ? 
reject(err) : resolve()) + }) + } + this.native.end(function () { + self._errorAllQueries(new Error('Connection terminated')) -"use strict"; + process.nextTick(() => { + self.emit('end') + if (cb) cb() + }) + }) + return result +} -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BufferReader = void 0; -const emptyBuffer = Buffer.allocUnsafe(0); -class BufferReader { - constructor(offset = 0) { - this.offset = offset; - this.buffer = emptyBuffer; - // TODO(bmc): support non-utf8 encoding? - this.encoding = 'utf-8'; - } - setBuffer(offset, buffer) { - this.offset = offset; - this.buffer = buffer; - } - int16() { - const result = this.buffer.readInt16BE(this.offset); - this.offset += 2; - return result; - } - byte() { - const result = this.buffer[this.offset]; - this.offset++; - return result; - } - int32() { - const result = this.buffer.readInt32BE(this.offset); - this.offset += 4; - return result; - } - string(length) { - const result = this.buffer.toString(this.encoding, this.offset, this.offset + length); - this.offset += length; - return result; - } - cstring() { - const start = this.offset; - let end = start; - while (this.buffer[end++] !== 0) { } - this.offset = end; - return this.buffer.toString(this.encoding, start, end - 1); - } - bytes(length) { - const result = this.buffer.slice(this.offset, this.offset + length); - this.offset += length; - return result; - } +Client.prototype._hasActiveQuery = function () { + return this._activeQuery && this._activeQuery.state !== 'error' && this._activeQuery.state !== 'end' } -exports.BufferReader = BufferReader; -//# sourceMappingURL=buffer-reader.js.map -/***/ }), +Client.prototype._pulseQueryQueue = function (initialConnection) { + if (!this._connected) { + return + } + if (this._hasActiveQuery()) { + return + } + var query = this._queryQueue.shift() + if (!query) { + if (!initialConnection) { + this.emit('drain') + } + return + } + this._activeQuery = query + query.submit(this) + var self = this + query.once('_done', function () { + self._pulseQueryQueue() + }) +} -/***/ 19228: -/***/ ((__unused_webpack_module, exports) => { +// attempt to cancel an in-progress query +Client.prototype.cancel = function (query) { + if (this._activeQuery === query) { + this.native.cancel(function () {}) + } else if (this._queryQueue.indexOf(query) !== -1) { + this._queryQueue.splice(this._queryQueue.indexOf(query), 1) + } +} -"use strict"; +Client.prototype.setTypeParser = function (oid, format, parseFn) { + return this._types.setTypeParser(oid, format, parseFn) +} -//binary data writer tuned for encoding binary specific to the postgres binary protocol -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Writer = void 0; -class Writer { - constructor(size = 256) { - this.size = size; - this.offset = 5; - this.headerPosition = 0; - this.buffer = Buffer.allocUnsafe(size); - } - ensure(size) { - var remaining = this.buffer.length - this.offset; - if (remaining < size) { - var oldBuffer = this.buffer; - // exponential growth factor of around ~ 1.5 - // https://stackoverflow.com/questions/2269063/buffer-growth-strategy - var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size; - this.buffer = Buffer.allocUnsafe(newSize); - oldBuffer.copy(this.buffer); - } - } - addInt32(num) { - this.ensure(4); - this.buffer[this.offset++] = (num >>> 24) & 0xff; - this.buffer[this.offset++] = (num >>> 16) & 0xff; - this.buffer[this.offset++] = (num >>> 8) & 0xff; - this.buffer[this.offset++] = (num >>> 0) & 0xff; - return 
this; - } - addInt16(num) { - this.ensure(2); - this.buffer[this.offset++] = (num >>> 8) & 0xff; - this.buffer[this.offset++] = (num >>> 0) & 0xff; - return this; - } - addCString(string) { - if (!string) { - this.ensure(1); - } - else { - var len = Buffer.byteLength(string); - this.ensure(len + 1); // +1 for null terminator - this.buffer.write(string, this.offset, 'utf-8'); - this.offset += len; - } - this.buffer[this.offset++] = 0; // null terminator - return this; - } - addString(string = '') { - var len = Buffer.byteLength(string); - this.ensure(len); - this.buffer.write(string, this.offset); - this.offset += len; - return this; - } - add(otherBuffer) { - this.ensure(otherBuffer.length); - otherBuffer.copy(this.buffer, this.offset); - this.offset += otherBuffer.length; - return this; - } - join(code) { - if (code) { - this.buffer[this.headerPosition] = code; - //length is everything in this packet minus the code - const length = this.offset - (this.headerPosition + 1); - this.buffer.writeInt32BE(length, this.headerPosition + 1); - } - return this.buffer.slice(code ? 0 : 5, this.offset); - } - flush(code) { - var result = this.join(code); - this.offset = 5; - this.headerPosition = 0; - this.buffer = Buffer.allocUnsafe(this.size); - return result; - } +Client.prototype.getTypeParser = function (oid, format) { + return this._types.getTypeParser(oid, format) } -exports.Writer = Writer; -//# sourceMappingURL=buffer-writer.js.map + /***/ }), -/***/ 21113: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 43148: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DatabaseError = exports.serialize = exports.parse = void 0; -const messages_1 = __nccwpck_require__(36359); -Object.defineProperty(exports, "DatabaseError", ({ enumerable: true, get: function () { return messages_1.DatabaseError; } })); -const serializer_1 = __nccwpck_require__(42439); -Object.defineProperty(exports, "serialize", ({ enumerable: true, get: function () { return serializer_1.serialize; } })); -const parser_1 = __nccwpck_require__(67912); -function parse(stream, callback) { - const parser = new parser_1.Parser(); - stream.on('data', (buffer) => parser.parse(buffer, callback)); - return new Promise((resolve) => stream.on('end', () => resolve())); -} -exports.parse = parse; -//# sourceMappingURL=index.js.map +module.exports = __nccwpck_require__(1094) + /***/ }), -/***/ 36359: -/***/ ((__unused_webpack_module, exports) => { +/***/ 91886: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NoticeMessage = exports.DataRowMessage = exports.CommandCompleteMessage = exports.ReadyForQueryMessage = exports.NotificationResponseMessage = exports.BackendKeyDataMessage = exports.AuthenticationMD5Password = exports.ParameterStatusMessage = exports.ParameterDescriptionMessage = exports.RowDescriptionMessage = exports.Field = exports.CopyResponse = exports.CopyDataMessage = exports.DatabaseError = exports.copyDone = exports.emptyQuery = exports.replicationStart = exports.portalSuspended = exports.noData = exports.closeComplete = exports.bindComplete = exports.parseComplete = void 0; -exports.parseComplete = { - name: 'parseComplete', - length: 5, -}; -exports.bindComplete = { - name: 'bindComplete', - length: 5, -}; -exports.closeComplete = { - name: 'closeComplete', - length: 5, -}; -exports.noData = { 
- name: 'noData', - length: 5, -}; -exports.portalSuspended = { - name: 'portalSuspended', - length: 5, -}; -exports.replicationStart = { - name: 'replicationStart', - length: 4, -}; -exports.emptyQuery = { - name: 'emptyQuery', - length: 4, -}; -exports.copyDone = { - name: 'copyDone', - length: 4, -}; -class DatabaseError extends Error { - constructor(message, length, name) { - super(message); - this.length = length; - this.name = name; - } -} -exports.DatabaseError = DatabaseError; -class CopyDataMessage { - constructor(length, chunk) { - this.length = length; - this.chunk = chunk; - this.name = 'copyData'; - } -} -exports.CopyDataMessage = CopyDataMessage; -class CopyResponse { - constructor(length, name, binary, columnCount) { - this.length = length; - this.name = name; - this.binary = binary; - this.columnTypes = new Array(columnCount); - } -} -exports.CopyResponse = CopyResponse; -class Field { - constructor(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, format) { - this.name = name; - this.tableID = tableID; - this.columnID = columnID; - this.dataTypeID = dataTypeID; - this.dataTypeSize = dataTypeSize; - this.dataTypeModifier = dataTypeModifier; - this.format = format; - } -} -exports.Field = Field; -class RowDescriptionMessage { - constructor(length, fieldCount) { - this.length = length; - this.fieldCount = fieldCount; - this.name = 'rowDescription'; - this.fields = new Array(this.fieldCount); - } -} -exports.RowDescriptionMessage = RowDescriptionMessage; -class ParameterDescriptionMessage { - constructor(length, parameterCount) { - this.length = length; - this.parameterCount = parameterCount; - this.name = 'parameterDescription'; - this.dataTypeIDs = new Array(this.parameterCount); - } -} -exports.ParameterDescriptionMessage = ParameterDescriptionMessage; -class ParameterStatusMessage { - constructor(length, parameterName, parameterValue) { - this.length = length; - this.parameterName = parameterName; - this.parameterValue = parameterValue; - this.name = 'parameterStatus'; - } -} -exports.ParameterStatusMessage = ParameterStatusMessage; -class AuthenticationMD5Password { - constructor(length, salt) { - this.length = length; - this.salt = salt; - this.name = 'authenticationMD5Password'; - } -} -exports.AuthenticationMD5Password = AuthenticationMD5Password; -class BackendKeyDataMessage { - constructor(length, processID, secretKey) { - this.length = length; - this.processID = processID; - this.secretKey = secretKey; - this.name = 'backendKeyData'; - } -} -exports.BackendKeyDataMessage = BackendKeyDataMessage; -class NotificationResponseMessage { - constructor(length, processId, channel, payload) { - this.length = length; - this.processId = processId; - this.channel = channel; - this.payload = payload; - this.name = 'notification'; - } -} -exports.NotificationResponseMessage = NotificationResponseMessage; -class ReadyForQueryMessage { - constructor(length, status) { - this.length = length; - this.status = status; - this.name = 'readyForQuery'; - } -} -exports.ReadyForQueryMessage = ReadyForQueryMessage; -class CommandCompleteMessage { - constructor(length, text) { - this.length = length; - this.text = text; - this.name = 'commandComplete'; - } -} -exports.CommandCompleteMessage = CommandCompleteMessage; -class DataRowMessage { - constructor(length, fields) { - this.length = length; - this.fields = fields; - this.name = 'dataRow'; - this.fieldCount = fields.length; - } + +var EventEmitter = __nccwpck_require__(28614).EventEmitter +var util = 
__nccwpck_require__(31669) +var utils = __nccwpck_require__(36419) + +var NativeQuery = (module.exports = function (config, values, callback) { + EventEmitter.call(this) + config = utils.normalizeQueryConfig(config, values, callback) + this.text = config.text + this.values = config.values + this.name = config.name + this.callback = config.callback + this.state = 'new' + this._arrayMode = config.rowMode === 'array' + + // if the 'row' event is listened for + // then emit them as they come in + // without setting singleRowMode to true + // this has almost no meaning because libpq + // reads all rows into memory befor returning any + this._emitRowEvents = false + this.on( + 'newListener', + function (event) { + if (event === 'row') this._emitRowEvents = true + }.bind(this) + ) +}) + +util.inherits(NativeQuery, EventEmitter) + +var errorFieldMap = { + /* eslint-disable quote-props */ + sqlState: 'code', + statementPosition: 'position', + messagePrimary: 'message', + context: 'where', + schemaName: 'schema', + tableName: 'table', + columnName: 'column', + dataTypeName: 'dataType', + constraintName: 'constraint', + sourceFile: 'file', + sourceLine: 'line', + sourceFunction: 'routine', } -exports.DataRowMessage = DataRowMessage; -class NoticeMessage { - constructor(length, message) { - this.length = length; - this.message = message; - this.name = 'notice'; + +NativeQuery.prototype.handleError = function (err) { + // copy pq error fields into the error object + var fields = this.native.pq.resultErrorFields() + if (fields) { + for (var key in fields) { + var normalizedFieldName = errorFieldMap[key] || key + err[normalizedFieldName] = fields[key] } + } + if (this.callback) { + this.callback(err) + } else { + this.emit('error', err) + } + this.state = 'error' } -exports.NoticeMessage = NoticeMessage; -//# sourceMappingURL=messages.js.map -/***/ }), +NativeQuery.prototype.then = function (onSuccess, onFailure) { + return this._getPromise().then(onSuccess, onFailure) +} -/***/ 67912: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +NativeQuery.prototype.catch = function (callback) { + return this._getPromise().catch(callback) +} -"use strict"; +NativeQuery.prototype._getPromise = function () { + if (this._promise) return this._promise + this._promise = new Promise( + function (resolve, reject) { + this._once('end', resolve) + this._once('error', reject) + }.bind(this) + ) + return this._promise +} -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Parser = void 0; -const messages_1 = __nccwpck_require__(36359); -const buffer_reader_1 = __nccwpck_require__(10320); -const assert_1 = __importDefault(__nccwpck_require__(42357)); -// every message is prefixed with a single bye -const CODE_LENGTH = 1; -// every message has an int32 length which includes itself but does -// NOT include the code in the length -const LEN_LENGTH = 4; -const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH; -const emptyBuffer = Buffer.allocUnsafe(0); -class Parser { - constructor(opts) { - this.buffer = emptyBuffer; - this.bufferLength = 0; - this.bufferOffset = 0; - this.reader = new buffer_reader_1.BufferReader(); - if ((opts === null || opts === void 0 ? void 0 : opts.mode) === 'binary') { - throw new Error('Binary mode not supported yet'); - } - this.mode = (opts === null || opts === void 0 ? 
void 0 : opts.mode) || 'text'; - } - parse(buffer, callback) { - this.mergeBuffer(buffer); - const bufferFullLength = this.bufferOffset + this.bufferLength; - let offset = this.bufferOffset; - while (offset + HEADER_LENGTH <= bufferFullLength) { - // code is 1 byte long - it identifies the message type - const code = this.buffer[offset]; - // length is 1 Uint32BE - it is the length of the message EXCLUDING the code - const length = this.buffer.readUInt32BE(offset + CODE_LENGTH); - const fullMessageLength = CODE_LENGTH + length; - if (fullMessageLength + offset <= bufferFullLength) { - const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer); - callback(message); - offset += fullMessageLength; - } - else { - break; - } - } - if (offset === bufferFullLength) { - // No more use for the buffer - this.buffer = emptyBuffer; - this.bufferLength = 0; - this.bufferOffset = 0; - } - else { - // Adjust the cursors of remainingBuffer - this.bufferLength = bufferFullLength - offset; - this.bufferOffset = offset; - } - } - mergeBuffer(buffer) { - if (this.bufferLength > 0) { - const newLength = this.bufferLength + buffer.byteLength; - const newFullLength = newLength + this.bufferOffset; - if (newFullLength > this.buffer.byteLength) { - // We can't concat the new buffer with the remaining one - let newBuffer; - if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) { - // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer - newBuffer = this.buffer; - } - else { - // Allocate a new larger buffer - let newBufferLength = this.buffer.byteLength * 2; - while (newLength >= newBufferLength) { - newBufferLength *= 2; - } - newBuffer = Buffer.allocUnsafe(newBufferLength); - } - // Move the remaining buffer to the new one - this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength); - this.buffer = newBuffer; - this.bufferOffset = 0; - } - // Concat the new buffer with the remaining one - buffer.copy(this.buffer, this.bufferOffset + this.bufferLength); - this.bufferLength = newLength; - } - else { - this.buffer = buffer; - this.bufferOffset = 0; - this.bufferLength = buffer.byteLength; - } - } - handlePacket(offset, code, length, bytes) { - switch (code) { - case 50 /* BindComplete */: - return messages_1.bindComplete; - case 49 /* ParseComplete */: - return messages_1.parseComplete; - case 51 /* CloseComplete */: - return messages_1.closeComplete; - case 110 /* NoData */: - return messages_1.noData; - case 115 /* PortalSuspended */: - return messages_1.portalSuspended; - case 99 /* CopyDone */: - return messages_1.copyDone; - case 87 /* ReplicationStart */: - return messages_1.replicationStart; - case 73 /* EmptyQuery */: - return messages_1.emptyQuery; - case 68 /* DataRow */: - return this.parseDataRowMessage(offset, length, bytes); - case 67 /* CommandComplete */: - return this.parseCommandCompleteMessage(offset, length, bytes); - case 90 /* ReadyForQuery */: - return this.parseReadyForQueryMessage(offset, length, bytes); - case 65 /* NotificationResponse */: - return this.parseNotificationMessage(offset, length, bytes); - case 82 /* AuthenticationResponse */: - return this.parseAuthenticationResponse(offset, length, bytes); - case 83 /* ParameterStatus */: - return this.parseParameterStatusMessage(offset, length, bytes); - case 75 /* BackendKeyData */: - return this.parseBackendKeyData(offset, length, bytes); - case 69 /* ErrorMessage */: - return this.parseErrorMessage(offset, 
length, bytes, 'error'); - case 78 /* NoticeMessage */: - return this.parseErrorMessage(offset, length, bytes, 'notice'); - case 84 /* RowDescriptionMessage */: - return this.parseRowDescriptionMessage(offset, length, bytes); - case 116 /* ParameterDescriptionMessage */: - return this.parseParameterDescriptionMessage(offset, length, bytes); - case 71 /* CopyIn */: - return this.parseCopyInMessage(offset, length, bytes); - case 72 /* CopyOut */: - return this.parseCopyOutMessage(offset, length, bytes); - case 100 /* CopyData */: - return this.parseCopyData(offset, length, bytes); - default: - assert_1.default.fail(`unknown message code: ${code.toString(16)}`); - } - } - parseReadyForQueryMessage(offset, length, bytes) { - this.reader.setBuffer(offset, bytes); - const status = this.reader.string(1); - return new messages_1.ReadyForQueryMessage(length, status); +NativeQuery.prototype.submit = function (client) { + this.state = 'running' + var self = this + this.native = client.native + client.native.arrayMode = this._arrayMode + + var after = function (err, rows, results) { + client.native.arrayMode = false + setImmediate(function () { + self.emit('_done') + }) + + // handle possible query error + if (err) { + return self.handleError(err) } - parseCommandCompleteMessage(offset, length, bytes) { - this.reader.setBuffer(offset, bytes); - const text = this.reader.cstring(); - return new messages_1.CommandCompleteMessage(length, text); + + // emit row events for each row in the result + if (self._emitRowEvents) { + if (results.length > 1) { + rows.forEach((rowOfRows, i) => { + rowOfRows.forEach((row) => { + self.emit('row', row, results[i]) + }) + }) + } else { + rows.forEach(function (row) { + self.emit('row', row, results) + }) + } } - parseCopyData(offset, length, bytes) { - const chunk = bytes.slice(offset, offset + (length - 4)); - return new messages_1.CopyDataMessage(length, chunk); + + // handle successful result + self.state = 'end' + self.emit('end', results) + if (self.callback) { + self.callback(null, results) } - parseCopyInMessage(offset, length, bytes) { - return this.parseCopyMessage(offset, length, bytes, 'copyInResponse'); + } + + if (process.domain) { + after = process.domain.bind(after) + } + + // named query + if (this.name) { + if (this.name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! 
Postgres only supports 63 characters for query names.') + console.error('You supplied %s (%s)', this.name, this.name.length) + console.error('This can cause conflicts and silent errors executing queries') + /* eslint-enable no-console */ } - parseCopyOutMessage(offset, length, bytes) { - return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse'); + var values = (this.values || []).map(utils.prepareValue) + + // check if the client has already executed this named query + // if so...just execute it again - skip the planning phase + if (client.namedQueries[this.name]) { + if (this.text && client.namedQueries[this.name] !== this.text) { + const err = new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) + return after(err) + } + return client.native.execute(this.name, values, after) } - parseCopyMessage(offset, length, bytes, messageName) { - this.reader.setBuffer(offset, bytes); - const isBinary = this.reader.byte() !== 0; - const columnCount = this.reader.int16(); - const message = new messages_1.CopyResponse(length, messageName, isBinary, columnCount); - for (let i = 0; i < columnCount; i++) { - message.columnTypes[i] = this.reader.int16(); - } - return message; + // plan the named query the first time, then execute it + return client.native.prepare(this.name, this.text, values.length, function (err) { + if (err) return after(err) + client.namedQueries[self.name] = self.text + return self.native.execute(self.name, values, after) + }) + } else if (this.values) { + if (!Array.isArray(this.values)) { + const err = new Error('Query values must be an array') + return after(err) } - parseNotificationMessage(offset, length, bytes) { - this.reader.setBuffer(offset, bytes); - const processId = this.reader.int32(); - const channel = this.reader.cstring(); - const payload = this.reader.cstring(); - return new messages_1.NotificationResponseMessage(length, processId, channel, payload); + var vals = this.values.map(utils.prepareValue) + client.native.query(this.text, vals, after) + } else { + client.native.query(this.text, after) + } +} + + +/***/ }), + +/***/ 31283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { EventEmitter } = __nccwpck_require__(28614) + +const Result = __nccwpck_require__(26736) +const utils = __nccwpck_require__(36419) + +class Query extends EventEmitter { + constructor(config, values, callback) { + super() + + config = utils.normalizeQueryConfig(config, values, callback) + + this.text = config.text + this.values = config.values + this.rows = config.rows + this.types = config.types + this.name = config.name + this.binary = config.binary + // use unique portal name each time + this.portal = config.portal || '' + this.callback = config.callback + this._rowMode = config.rowMode + if (process.domain && config.callback) { + this.callback = process.domain.bind(config.callback) } - parseRowDescriptionMessage(offset, length, bytes) { - this.reader.setBuffer(offset, bytes); - const fieldCount = this.reader.int16(); - const message = new messages_1.RowDescriptionMessage(length, fieldCount); - for (let i = 0; i < fieldCount; i++) { - message.fields[i] = this.parseField(); - } - return message; + this._result = new Result(this._rowMode, this.types) + + // potential for multiple results + this._results = this._result + this.isPreparedStatement = false + this._canceledDueToError = false + this._promise = null + } + + requiresPreparation() { + // named queries must always be prepared + 
if (this.name) { + return true } - parseField() { - const name = this.reader.cstring(); - const tableID = this.reader.int32(); - const columnID = this.reader.int16(); - const dataTypeID = this.reader.int32(); - const dataTypeSize = this.reader.int16(); - const dataTypeModifier = this.reader.int32(); - const mode = this.reader.int16() === 0 ? 'text' : 'binary'; - return new messages_1.Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode); + // always prepare if there are max number of rows expected per + // portal execution + if (this.rows) { + return true } - parseParameterDescriptionMessage(offset, length, bytes) { - this.reader.setBuffer(offset, bytes); - const parameterCount = this.reader.int16(); - const message = new messages_1.ParameterDescriptionMessage(length, parameterCount); - for (let i = 0; i < parameterCount; i++) { - message.dataTypeIDs[i] = this.reader.int32(); - } - return message; + // don't prepare empty text queries + if (!this.text) { + return false } - parseDataRowMessage(offset, length, bytes) { - this.reader.setBuffer(offset, bytes); - const fieldCount = this.reader.int16(); - const fields = new Array(fieldCount); - for (let i = 0; i < fieldCount; i++) { - const len = this.reader.int32(); - // a -1 for length means the value of the field is null - fields[i] = len === -1 ? null : this.reader.string(len); - } - return new messages_1.DataRowMessage(length, fields); + // prepare if there are values + if (!this.values) { + return false } - parseParameterStatusMessage(offset, length, bytes) { - this.reader.setBuffer(offset, bytes); - const name = this.reader.cstring(); - const value = this.reader.cstring(); - return new messages_1.ParameterStatusMessage(length, name, value); + return this.values.length > 0 + } + + _checkForMultirow() { + // if we already have a result with a command property + // then we've already executed one query in a multi-statement simple query + // turn our results into an array of results + if (this._result.command) { + if (!Array.isArray(this._results)) { + this._results = [this._result] + } + this._result = new Result(this._rowMode, this.types) + this._results.push(this._result) } - parseBackendKeyData(offset, length, bytes) { - this.reader.setBuffer(offset, bytes); - const processID = this.reader.int32(); - const secretKey = this.reader.int32(); - return new messages_1.BackendKeyDataMessage(length, processID, secretKey); + } + + // associates row metadata from the supplied + // message with this query object + // metadata used when parsing row results + handleRowDescription(msg) { + this._checkForMultirow() + this._result.addFields(msg.fields) + this._accumulateRows = this.callback || !this.listeners('row').length + } + + handleDataRow(msg) { + let row + + if (this._canceledDueToError) { + return } - parseAuthenticationResponse(offset, length, bytes) { - this.reader.setBuffer(offset, bytes); - const code = this.reader.int32(); - // TODO(bmc): maybe better types here - const message = { - name: 'authenticationOk', - length, - }; - switch (code) { - case 0: // AuthenticationOk - break; - case 3: // AuthenticationCleartextPassword - if (message.length === 8) { - message.name = 'authenticationCleartextPassword'; - } - break; - case 5: // AuthenticationMD5Password - if (message.length === 12) { - message.name = 'authenticationMD5Password'; - const salt = this.reader.bytes(4); - return new messages_1.AuthenticationMD5Password(length, salt); - } - break; - case 10: // AuthenticationSASL - message.name = 'authenticationSASL'; 
- message.mechanisms = []; - let mechanism; - do { - mechanism = this.reader.cstring(); - if (mechanism) { - message.mechanisms.push(mechanism); - } - } while (mechanism); - break; - case 11: // AuthenticationSASLContinue - message.name = 'authenticationSASLContinue'; - message.data = this.reader.string(length - 8); - break; - case 12: // AuthenticationSASLFinal - message.name = 'authenticationSASLFinal'; - message.data = this.reader.string(length - 8); - break; - default: - throw new Error('Unknown authenticationOk message type ' + code); - } - return message; + + try { + row = this._result.parseRow(msg.fields) + } catch (err) { + this._canceledDueToError = err + return } - parseErrorMessage(offset, length, bytes, name) { - this.reader.setBuffer(offset, bytes); - const fields = {}; - let fieldType = this.reader.string(1); - while (fieldType !== '\0') { - fields[fieldType] = this.reader.cstring(); - fieldType = this.reader.string(1); - } - const messageValue = fields.M; - const message = name === 'notice' ? new messages_1.NoticeMessage(length, messageValue) : new messages_1.DatabaseError(messageValue, length, name); - message.severity = fields.S; - message.code = fields.C; - message.detail = fields.D; - message.hint = fields.H; - message.position = fields.P; - message.internalPosition = fields.p; - message.internalQuery = fields.q; - message.where = fields.W; - message.schema = fields.s; - message.table = fields.t; - message.column = fields.c; - message.dataType = fields.d; - message.constraint = fields.n; - message.file = fields.F; - message.line = fields.L; - message.routine = fields.R; - return message; + + this.emit('row', row, this._result) + if (this._accumulateRows) { + this._result.addRow(row) } -} -exports.Parser = Parser; -//# sourceMappingURL=parser.js.map + } -/***/ }), + handleCommandComplete(msg, connection) { + this._checkForMultirow() + this._result.addCommandComplete(msg) + // need to sync after each command complete of a prepared statement + // if we were using a row count which results in multiple calls to _getRows + if (this.rows) { + connection.sync() + } + } -/***/ 42439: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + // if a named prepared statement is created with empty query text + // the backend will send an emptyQuery message but *not* a command complete message + // since we pipeline sync immediately after execute we don't need to do anything here + // unless we have rows specified, in which case we did not pipeline the intial sync call + handleEmptyQuery(connection) { + if (this.rows) { + connection.sync() + } + } -"use strict"; + handleError(err, connection) { + // need to sync after error during a prepared statement + if (this._canceledDueToError) { + err = this._canceledDueToError + this._canceledDueToError = false + } + // if callback supplied do not emit error event as uncaught error + // events will bubble up to node process + if (this.callback) { + return this.callback(err) + } + this.emit('error', err) + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.serialize = void 0; -const buffer_writer_1 = __nccwpck_require__(19228); -const writer = new buffer_writer_1.Writer(); -const startup = (opts) => { - // protocol version - writer.addInt16(3).addInt16(0); - for (const key of Object.keys(opts)) { - writer.addCString(key).addCString(opts[key]); + handleReadyForQuery(con) { + if (this._canceledDueToError) { + return this.handleError(this._canceledDueToError, con) } - 
writer.addCString('client_encoding').addCString('UTF8'); - var bodyBuffer = writer.addCString('').flush(); - // this message is sent without a code - var length = bodyBuffer.length + 4; - return new buffer_writer_1.Writer().addInt32(length).add(bodyBuffer).flush(); -}; -const requestSsl = () => { - const response = Buffer.allocUnsafe(8); - response.writeInt32BE(8, 0); - response.writeInt32BE(80877103, 4); - return response; -}; -const password = (password) => { - return writer.addCString(password).flush(112 /* startup */); -}; -const sendSASLInitialResponseMessage = function (mechanism, initialResponse) { - // 0x70 = 'p' - writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse); - return writer.flush(112 /* startup */); -}; -const sendSCRAMClientFinalMessage = function (additionalData) { - return writer.addString(additionalData).flush(112 /* startup */); -}; -const query = (text) => { - return writer.addCString(text).flush(81 /* query */); -}; -const emptyArray = []; -const parse = (query) => { - // expect something like this: - // { name: 'queryName', - // text: 'select * from blah', - // types: ['int8', 'bool'] } - // normalize missing query names to allow for null - const name = query.name || ''; - if (name.length > 63) { - /* eslint-disable no-console */ - console.error('Warning! Postgres only supports 63 characters for query names.'); - console.error('You supplied %s (%s)', name, name.length); - console.error('This can cause conflicts and silent errors executing queries'); - /* eslint-enable no-console */ + if (this.callback) { + this.callback(null, this._results) } - const types = query.types || emptyArray; - var len = types.length; - var buffer = writer - .addCString(name) // name of query - .addCString(query.text) // actual query text - .addInt16(len); - for (var i = 0; i < len; i++) { - buffer.addInt32(types[i]); + this.emit('end', this._results) + } + + submit(connection) { + if (typeof this.text !== 'string' && typeof this.name !== 'string') { + return new Error('A query must have either text or a name. Supplying neither is unsupported.') } - return writer.flush(80 /* parse */); -}; -const paramWriter = new buffer_writer_1.Writer(); -const writeValues = function (values, valueMapper) { - for (let i = 0; i < values.length; i++) { - const mappedVal = valueMapper ? 
valueMapper(values[i], i) : values[i]; - if (mappedVal == null) { - // add the param type (string) to the writer - writer.addInt16(0 /* STRING */); - // write -1 to the param writer to indicate null - paramWriter.addInt32(-1); - } - else if (mappedVal instanceof Buffer) { - // add the param type (binary) to the writer - writer.addInt16(1 /* BINARY */); - // add the buffer to the param writer - paramWriter.addInt32(mappedVal.length); - paramWriter.add(mappedVal); - } - else { - // add the param type (string) to the writer - writer.addInt16(0 /* STRING */); - paramWriter.addInt32(Buffer.byteLength(mappedVal)); - paramWriter.addString(mappedVal); - } + const previous = connection.parsedStatements[this.name] + if (this.text && previous && this.text !== previous) { + return new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) } -}; -const bind = (config = {}) => { - // normalize config - const portal = config.portal || ''; - const statement = config.statement || ''; - const binary = config.binary || false; - const values = config.values || emptyArray; - const len = values.length; - writer.addCString(portal).addCString(statement); - writer.addInt16(len); - writeValues(values, config.valueMapper); - writer.addInt16(len); - writer.add(paramWriter.flush()); - // format code - writer.addInt16(binary ? 1 /* BINARY */ : 0 /* STRING */); - return writer.flush(66 /* bind */); -}; -const emptyExecute = Buffer.from([69 /* execute */, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]); -const execute = (config) => { - // this is the happy path for most queries - if (!config || (!config.portal && !config.rows)) { - return emptyExecute; + if (this.values && !Array.isArray(this.values)) { + return new Error('Query values must be an array') } - const portal = config.portal || ''; - const rows = config.rows || 0; - const portalLength = Buffer.byteLength(portal); - const len = 4 + portalLength + 1 + 4; - // one extra bit for code - const buff = Buffer.allocUnsafe(1 + len); - buff[0] = 69 /* execute */; - buff.writeInt32BE(len, 1); - buff.write(portal, 5, 'utf-8'); - buff[portalLength + 5] = 0; // null terminate portal cString - buff.writeUInt32BE(rows, buff.length - 4); - return buff; -}; -const cancel = (processID, secretKey) => { - const buffer = Buffer.allocUnsafe(16); - buffer.writeInt32BE(16, 0); - buffer.writeInt16BE(1234, 4); - buffer.writeInt16BE(5678, 6); - buffer.writeInt32BE(processID, 8); - buffer.writeInt32BE(secretKey, 12); - return buffer; -}; -const cstringMessage = (code, string) => { - const stringLen = Buffer.byteLength(string); - const len = 4 + stringLen + 1; - // one extra bit for code - const buffer = Buffer.allocUnsafe(1 + len); - buffer[0] = code; - buffer.writeInt32BE(len, 1); - buffer.write(string, 5, 'utf-8'); - buffer[len] = 0; // null terminate cString - return buffer; -}; -const emptyDescribePortal = writer.addCString('P').flush(68 /* describe */); -const emptyDescribeStatement = writer.addCString('S').flush(68 /* describe */); -const describe = (msg) => { - return msg.name - ? cstringMessage(68 /* describe */, `${msg.type}${msg.name || ''}`) - : msg.type === 'P' - ? 
emptyDescribePortal - : emptyDescribeStatement; -}; -const close = (msg) => { - const text = `${msg.type}${msg.name || ''}`; - return cstringMessage(67 /* close */, text); -}; -const copyData = (chunk) => { - return writer.add(chunk).flush(100 /* copyFromChunk */); -}; -const copyFail = (message) => { - return cstringMessage(102 /* copyFail */, message); -}; -const codeOnlyBuffer = (code) => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]); -const flushBuffer = codeOnlyBuffer(72 /* flush */); -const syncBuffer = codeOnlyBuffer(83 /* sync */); -const endBuffer = codeOnlyBuffer(88 /* end */); -const copyDoneBuffer = codeOnlyBuffer(99 /* copyDone */); -const serialize = { - startup, - password, - requestSsl, - sendSASLInitialResponseMessage, - sendSCRAMClientFinalMessage, - query, - parse, - bind, - execute, - describe, - close, - flush: () => flushBuffer, - sync: () => syncBuffer, - end: () => endBuffer, - copyData, - copyDone: () => copyDoneBuffer, - copyFail, - cancel, -}; -exports.serialize = serialize; -//# sourceMappingURL=serializer.js.map + if (this.requiresPreparation()) { + this.prepare(connection) + } else { + connection.query(this.text) + } + return null + } -/***/ }), + hasBeenParsed(connection) { + return this.name && connection.parsedStatements[this.name] + } -/***/ 77929: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + handlePortalSuspended(connection) { + this._getRows(connection, this.rows) + } -var textParsers = __nccwpck_require__(50977); -var binaryParsers = __nccwpck_require__(99849); -var arrayParser = __nccwpck_require__(83614); -var builtinTypes = __nccwpck_require__(45967); + _getRows(connection, rows) { + connection.execute({ + portal: this.portal, + rows: rows, + }) + // if we're not reading pages of rows send the sync command + // to indicate the pipeline is finished + if (!rows) { + connection.sync() + } else { + // otherwise flush the call out to read more rows + connection.flush() + } + } -exports.getTypeParser = getTypeParser; -exports.setTypeParser = setTypeParser; -exports.arrayParser = arrayParser; -exports.builtins = builtinTypes; + // http://developer.postgresql.org/pgdocs/postgres/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY + prepare(connection) { + // prepared statements need sync to be called after each command + // complete or when an error is encountered + this.isPreparedStatement = true -var typeParsers = { - text: {}, - binary: {} -}; + // TODO refactor this poor encapsulation + if (!this.hasBeenParsed(connection)) { + connection.parse({ + text: this.text, + name: this.name, + types: this.types, + }) + } -//the empty parse function -function noParse (val) { - return String(val); -}; + // because we're mapping user supplied values to + // postgres wire protocol compatible values it could + // throw an exception, so try/catch this section + try { + connection.bind({ + portal: this.portal, + statement: this.name, + values: this.values, + binary: this.binary, + valueMapper: utils.prepareValue, + }) + } catch (err) { + this.handleError(err, connection) + return + } -//returns a function used to convert a specific type (specified by -//oid) into a result javascript type -//note: the oid can be obtained via the following sql query: -//SELECT oid FROM pg_type WHERE typname = 'TYPE_NAME_HERE'; -function getTypeParser (oid, format) { - format = format || 'text'; - if (!typeParsers[format]) { - return noParse; + connection.describe({ + type: 'P', + name: this.portal || '', + }) + + this._getRows(connection, this.rows) } - return 
typeParsers[format][oid] || noParse; -}; -function setTypeParser (oid, format, parseFn) { - if(typeof format == 'function') { - parseFn = format; - format = 'text'; + handleCopyInResponse(connection) { + connection.sendCopyFail('No source stream defined') } - typeParsers[format][oid] = parseFn; -}; -textParsers.init(function(oid, converter) { - typeParsers.text[oid] = converter; -}); + // eslint-disable-next-line no-unused-vars + handleCopyData(msg, connection) { + // noop + } +} -binaryParsers.init(function(oid, converter) { - typeParsers.binary[oid] = converter; -}); +module.exports = Query /***/ }), -/***/ 83614: +/***/ 26736: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var array = __nccwpck_require__(34702); +"use strict"; -module.exports = { - create: function (source, transform) { - return { - parse: function() { - return array.parse(source, transform); + +var types = __nccwpck_require__(77929) + +var matchRegexp = /^([A-Za-z]+)(?: (\d+))?(?: (\d+))?/ + +// result object returned from query +// in the 'end' event and also +// passed as second argument to provided callback +class Result { + constructor(rowMode, types) { + this.command = null + this.rowCount = null + this.oid = null + this.rows = [] + this.fields = [] + this._parsers = undefined + this._types = types + this.RowCtor = null + this.rowAsArray = rowMode === 'array' + if (this.rowAsArray) { + this.parseRow = this._parseRowAsArray + } + } + + // adds a command complete message + addCommandComplete(msg) { + var match + if (msg.text) { + // pure javascript + match = matchRegexp.exec(msg.text) + } else { + // native bindings + match = matchRegexp.exec(msg.command) + } + if (match) { + this.command = match[1] + if (match[3]) { + // COMMMAND OID ROWS + this.oid = parseInt(match[2], 10) + this.rowCount = parseInt(match[3], 10) + } else if (match[2]) { + // COMMAND ROWS + this.rowCount = parseInt(match[2], 10) } - }; + } } -}; + + _parseRowAsArray(rowData) { + var row = new Array(rowData.length) + for (var i = 0, len = rowData.length; i < len; i++) { + var rawValue = rowData[i] + if (rawValue !== null) { + row[i] = this._parsers[i](rawValue) + } else { + row[i] = null + } + } + return row + } + + parseRow(rowData) { + var row = {} + for (var i = 0, len = rowData.length; i < len; i++) { + var rawValue = rowData[i] + var field = this.fields[i].name + if (rawValue !== null) { + row[field] = this._parsers[i](rawValue) + } else { + row[field] = null + } + } + return row + } + + addRow(row) { + this.rows.push(row) + } + + addFields(fieldDescriptions) { + // clears field definitions + // multiple query statements in 1 action can result in multiple sets + // of rowDescriptions...eg: 'select NOW(); select 1::int;' + // you need to reset the fields + this.fields = fieldDescriptions + if (this.fields.length) { + this._parsers = new Array(fieldDescriptions.length) + } + for (var i = 0; i < fieldDescriptions.length; i++) { + var desc = fieldDescriptions[i] + if (this._types) { + this._parsers[i] = this._types.getTypeParser(desc.dataTypeID, desc.format || 'text') + } else { + this._parsers[i] = types.getTypeParser(desc.dataTypeID, desc.format || 'text') + } + } + } +} + +module.exports = Result /***/ }), -/***/ 99849: +/***/ 19481: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var parseInt64 = __nccwpck_require__(41180); +"use strict"; -var parseBits = function(data, bits, offset, invert, callback) { - offset = offset || 0; - invert = invert || false; - callback = callback || function(lastValue, 
newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; }; - var offsetBytes = offset >> 3; +const crypto = __nccwpck_require__(76417) - var inv = function(value) { - if (invert) { - return ~value & 0xff; - } +function startSession(mechanisms) { + if (mechanisms.indexOf('SCRAM-SHA-256') === -1) { + throw new Error('SASL: Only mechanism SCRAM-SHA-256 is currently supported') + } - return value; - }; + const clientNonce = crypto.randomBytes(18).toString('base64') - // read first (maybe partial) byte - var mask = 0xff; - var firstBits = 8 - (offset % 8); - if (bits < firstBits) { - mask = (0xff << (8 - bits)) & 0xff; - firstBits = bits; + return { + mechanism: 'SCRAM-SHA-256', + clientNonce, + response: 'n,,n=*,r=' + clientNonce, + message: 'SASLInitialResponse', } +} - if (offset) { - mask = mask >> (offset % 8); +function continueSession(session, password, serverData) { + if (session.message !== 'SASLInitialResponse') { + throw new Error('SASL: Last message was not SASLInitialResponse') } - var result = 0; - if ((offset % 8) + bits >= 8) { - result = callback(0, inv(data[offsetBytes]) & mask, firstBits); - } + const sv = extractVariablesFromFirstServerMessage(serverData) - // read bytes - var bytes = (bits + offset) >> 3; - for (var i = offsetBytes + 1; i < bytes; i++) { - result = callback(result, inv(data[i]), 8); + if (!sv.nonce.startsWith(session.clientNonce)) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce does not start with client nonce') } - // bits to read, that are not a complete byte - var lastBits = (bits + offset) % 8; - if (lastBits > 0) { - result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits); - } + var saltBytes = Buffer.from(sv.salt, 'base64') - return result; -}; + var saltedPassword = Hi(password, saltBytes, sv.iteration) -var parseFloatFromBits = function(data, precisionBits, exponentBits) { - var bias = Math.pow(2, exponentBits - 1) - 1; - var sign = parseBits(data, 1); - var exponent = parseBits(data, exponentBits, 1); + var clientKey = createHMAC(saltedPassword, 'Client Key') + var storedKey = crypto.createHash('sha256').update(clientKey).digest() - if (exponent === 0) { - return 0; - } + var clientFirstMessageBare = 'n=*,r=' + session.clientNonce + var serverFirstMessage = 'r=' + sv.nonce + ',s=' + sv.salt + ',i=' + sv.iteration - // parse mantissa - var precisionBitsCounter = 1; - var parsePrecisionBits = function(lastValue, newValue, bits) { - if (lastValue === 0) { - lastValue = 1; - } + var clientFinalMessageWithoutProof = 'c=biws,r=' + sv.nonce - for (var i = 1; i <= bits; i++) { - precisionBitsCounter /= 2; - if ((newValue & (0x1 << (bits - i))) > 0) { - lastValue += precisionBitsCounter; - } - } + var authMessage = clientFirstMessageBare + ',' + serverFirstMessage + ',' + clientFinalMessageWithoutProof - return lastValue; - }; + var clientSignature = createHMAC(storedKey, authMessage) + var clientProofBytes = xorBuffers(clientKey, clientSignature) + var clientProof = clientProofBytes.toString('base64') - var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits); + var serverKey = createHMAC(saltedPassword, 'Server Key') + var serverSignatureBytes = createHMAC(serverKey, authMessage) - // special cases - if (exponent == (Math.pow(2, exponentBits + 1) - 1)) { - if (mantissa === 0) { - return (sign === 0) ? 
Infinity : -Infinity; - } + session.message = 'SASLResponse' + session.serverSignature = serverSignatureBytes.toString('base64') + session.response = clientFinalMessageWithoutProof + ',p=' + clientProof +} - return NaN; +function finalizeSession(session, serverData) { + if (session.message !== 'SASLResponse') { + throw new Error('SASL: Last message was not SASLResponse') } - // normale number - return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa; -}; + var serverSignature -var parseInt16 = function(value) { - if (parseBits(value, 1) == 1) { - return -1 * (parseBits(value, 15, 1, true) + 1); + String(serverData) + .split(',') + .forEach(function (part) { + switch (part[0]) { + case 'v': + serverSignature = part.substr(2) + break + } + }) + + if (serverSignature !== session.serverSignature) { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature does not match') } +} - return parseBits(value, 15, 1); -}; +function extractVariablesFromFirstServerMessage(data) { + var nonce, salt, iteration -var parseInt32 = function(value) { - if (parseBits(value, 1) == 1) { - return -1 * (parseBits(value, 31, 1, true) + 1); + String(data) + .split(',') + .forEach(function (part) { + switch (part[0]) { + case 'r': + nonce = part.substr(2) + break + case 's': + salt = part.substr(2) + break + case 'i': + iteration = parseInt(part.substr(2), 10) + break + } + }) + + if (!nonce) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce missing') } - return parseBits(value, 31, 1); -}; + if (!salt) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt missing') + } -var parseFloat32 = function(value) { - return parseFloatFromBits(value, 23, 8); -}; + if (!iteration) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: iteration missing') + } -var parseFloat64 = function(value) { - return parseFloatFromBits(value, 52, 11); -}; + return { + nonce, + salt, + iteration, + } +} -var parseNumeric = function(value) { - var sign = parseBits(value, 16, 32); - if (sign == 0xc000) { - return NaN; +function xorBuffers(a, b) { + if (!Buffer.isBuffer(a)) a = Buffer.from(a) + if (!Buffer.isBuffer(b)) b = Buffer.from(b) + var res = [] + if (a.length > b.length) { + for (var i = 0; i < b.length; i++) { + res.push(a[i] ^ b[i]) + } + } else { + for (var j = 0; j < a.length; j++) { + res.push(a[j] ^ b[j]) + } } + return Buffer.from(res) +} - var weight = Math.pow(10000, parseBits(value, 16, 16)); - var result = 0; +function createHMAC(key, msg) { + return crypto.createHmac('sha256', key).update(msg).digest() +} - var digits = []; - var ndigits = parseBits(value, 16); - for (var i = 0; i < ndigits; i++) { - result += parseBits(value, 16, 64 + (16 * i)) * weight; - weight /= 10000; +function Hi(password, saltBytes, iterations) { + var ui1 = createHMAC(password, Buffer.concat([saltBytes, Buffer.from([0, 0, 0, 1])])) + var ui = ui1 + for (var i = 0; i < iterations - 1; i++) { + ui1 = createHMAC(password, ui1) + ui = xorBuffers(ui, ui1) } - var scale = Math.pow(10, parseBits(value, 16, 48)); - return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale; -}; + return ui +} -var parseDate = function(isUTC, value) { - var sign = parseBits(value, 1); - var rawValue = parseBits(value, 63, 1); +module.exports = { + startSession, + continueSession, + finalizeSession, +} - // discard usecs and shift from 2000 to 1970 - var result = new Date((((sign === 0) ? 
1 : -1) * rawValue / 1000) + 946684800000); - if (!isUTC) { - result.setTime(result.getTime() + result.getTimezoneOffset() * 60000); - } +/***/ }), - // add microseconds to the date - result.usec = rawValue % 1000; - result.getMicroSeconds = function() { - return this.usec; - }; - result.setMicroSeconds = function(value) { - this.usec = value; - }; - result.getUTCMicroSeconds = function() { - return this.usec; - }; +/***/ 78873: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return result; -}; +"use strict"; -var parseArray = function(value) { - var dim = parseBits(value, 32); - var flags = parseBits(value, 32, 32); - var elementType = parseBits(value, 32, 64); +var types = __nccwpck_require__(77929) - var offset = 96; - var dims = []; - for (var i = 0; i < dim; i++) { - // parse dimension - dims[i] = parseBits(value, 32, offset); - offset += 32; +function TypeOverrides(userTypes) { + this._types = userTypes || types + this.text = {} + this.binary = {} +} - // ignore lower bounds - offset += 32; +TypeOverrides.prototype.getOverrides = function (format) { + switch (format) { + case 'text': + return this.text + case 'binary': + return this.binary + default: + return {} } +} - var parseElement = function(elementType) { - // parse content length - var length = parseBits(value, 32, offset); - offset += 32; +TypeOverrides.prototype.setTypeParser = function (oid, format, parseFn) { + if (typeof format === 'function') { + parseFn = format + format = 'text' + } + this.getOverrides(format)[oid] = parseFn +} - // parse null values - if (length == 0xffffffff) { - return null; +TypeOverrides.prototype.getTypeParser = function (oid, format) { + format = format || 'text' + return this.getOverrides(format)[oid] || this._types.getTypeParser(oid, format) +} + +module.exports = TypeOverrides + + +/***/ }), + +/***/ 36419: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const crypto = __nccwpck_require__(76417) + +const defaults = __nccwpck_require__(26419) + +function escapeElement(elementRepresentation) { + var escaped = elementRepresentation.replace(/\\/g, '\\\\').replace(/"/g, '\\"') + + return '"' + escaped + '"' +} + +// convert a JS array to a postgres array literal +// uses comma separator so won't work for types like box that use +// a different array separator. +function arrayString(val) { + var result = '{' + for (var i = 0; i < val.length; i++) { + if (i > 0) { + result = result + ',' + } + if (val[i] === null || typeof val[i] === 'undefined') { + result = result + 'NULL' + } else if (Array.isArray(val[i])) { + result = result + arrayString(val[i]) + } else if (val[i] instanceof Buffer) { + result += '\\\\x' + val[i].toString('hex') + } else { + result += escapeElement(prepareValue(val[i])) } + } + result = result + '}' + return result +} - var result; - if ((elementType == 0x17) || (elementType == 0x14)) { - // int/bigint - result = parseBits(value, length * 8, offset); - offset += length * 8; - return result; +// converts values from javascript types +// to their 'raw' counterparts for use as a postgres parameter +// note: you can override this function to provide your own conversion mechanism +// for complex types, etc... 
+var prepareValue = function (val, seen) { + // null and undefined are both null for postgres + if (val == null) { + return null + } + if (val instanceof Buffer) { + return val + } + if (ArrayBuffer.isView(val)) { + var buf = Buffer.from(val.buffer, val.byteOffset, val.byteLength) + if (buf.length === val.byteLength) { + return buf } - else if (elementType == 0x19) { - // string - result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3); - return result; + return buf.slice(val.byteOffset, val.byteOffset + val.byteLength) // Node.js v4 does not support those Buffer.from params + } + if (val instanceof Date) { + if (defaults.parseInputDatesAsUTC) { + return dateToStringUTC(val) + } else { + return dateToString(val) } - else { - console.log("ERROR: ElementType not implemented: " + elementType); + } + if (Array.isArray(val)) { + return arrayString(val) + } + if (typeof val === 'object') { + return prepareObject(val, seen) + } + return val.toString() +} + +function prepareObject(val, seen) { + if (val && typeof val.toPostgres === 'function') { + seen = seen || [] + if (seen.indexOf(val) !== -1) { + throw new Error('circular reference detected while preparing "' + val + '" for query') } - }; + seen.push(val) - var parse = function(dimension, elementType) { - var array = []; - var i; + return prepareValue(val.toPostgres(prepareValue), seen) + } + return JSON.stringify(val) +} - if (dimension.length > 1) { - var count = dimension.shift(); - for (i = 0; i < count; i++) { - array[i] = parse(dimension, elementType); - } - dimension.unshift(count); - } - else { - for (i = 0; i < dimension[0]; i++) { - array[i] = parseElement(elementType); - } - } +function pad(number, digits) { + number = '' + number + while (number.length < digits) { + number = '0' + number + } + return number +} - return array; - }; +function dateToString(date) { + var offset = -date.getTimezoneOffset() - return parse(dims, elementType); -}; + var year = date.getFullYear() + var isBCYear = year < 1 + if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation -var parseText = function(value) { - return value.toString('utf8'); -}; + var ret = + pad(year, 4) + + '-' + + pad(date.getMonth() + 1, 2) + + '-' + + pad(date.getDate(), 2) + + 'T' + + pad(date.getHours(), 2) + + ':' + + pad(date.getMinutes(), 2) + + ':' + + pad(date.getSeconds(), 2) + + '.' 
+ + pad(date.getMilliseconds(), 3) -var parseBool = function(value) { - if(value === null) return null; - return (parseBits(value, 8) > 0); -}; + if (offset < 0) { + ret += '-' + offset *= -1 + } else { + ret += '+' + } -var init = function(register) { - register(20, parseInt64); - register(21, parseInt16); - register(23, parseInt32); - register(26, parseInt32); - register(1700, parseNumeric); - register(700, parseFloat32); - register(701, parseFloat64); - register(16, parseBool); - register(1114, parseDate.bind(null, false)); - register(1184, parseDate.bind(null, true)); - register(1000, parseArray); - register(1007, parseArray); - register(1016, parseArray); - register(1008, parseArray); - register(1009, parseArray); - register(25, parseText); -}; + ret += pad(Math.floor(offset / 60), 2) + ':' + pad(offset % 60, 2) + if (isBCYear) ret += ' BC' + return ret +} -module.exports = { - init: init -}; +function dateToStringUTC(date) { + var year = date.getUTCFullYear() + var isBCYear = year < 1 + if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation + var ret = + pad(year, 4) + + '-' + + pad(date.getUTCMonth() + 1, 2) + + '-' + + pad(date.getUTCDate(), 2) + + 'T' + + pad(date.getUTCHours(), 2) + + ':' + + pad(date.getUTCMinutes(), 2) + + ':' + + pad(date.getUTCSeconds(), 2) + + '.' + + pad(date.getUTCMilliseconds(), 3) -/***/ }), + ret += '+00:00' + if (isBCYear) ret += ' BC' + return ret +} -/***/ 45967: -/***/ ((module) => { +function normalizeQueryConfig(config, values, callback) { + // can take in strings or config objects + config = typeof config === 'string' ? { text: config } : config + if (values) { + if (typeof values === 'function') { + config.callback = values + } else { + config.values = values + } + } + if (callback) { + config.callback = callback + } + return config +} -/** - * Following query was used to generate this file: +const md5 = function (string) { + return crypto.createHash('md5').update(string, 'utf-8').digest('hex') +} - SELECT json_object_agg(UPPER(PT.typname), PT.oid::int4 ORDER BY pt.oid) - FROM pg_type PT - WHERE typnamespace = (SELECT pgn.oid FROM pg_namespace pgn WHERE nspname = 'pg_catalog') -- Take only builting Postgres types with stable OID (extension types are not guaranted to be stable) - AND typtype = 'b' -- Only basic types - AND typelem = 0 -- Ignore aliases - AND typisdefined -- Ignore undefined types - */ +// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html +const postgresMd5PasswordHash = function (user, password, salt) { + var inner = md5(password + user) + var outer = md5(Buffer.concat([Buffer.from(inner), salt])) + return 'md5' + outer +} module.exports = { - BOOL: 16, - BYTEA: 17, - CHAR: 18, - INT8: 20, - INT2: 21, - INT4: 23, - REGPROC: 24, - TEXT: 25, - OID: 26, - TID: 27, - XID: 28, - CID: 29, - JSON: 114, - XML: 142, - PG_NODE_TREE: 194, - SMGR: 210, - PATH: 602, - POLYGON: 604, - CIDR: 650, - FLOAT4: 700, - FLOAT8: 701, - ABSTIME: 702, - RELTIME: 703, - TINTERVAL: 704, - CIRCLE: 718, - MACADDR8: 774, - MONEY: 790, - MACADDR: 829, - INET: 869, - ACLITEM: 1033, - BPCHAR: 1042, - VARCHAR: 1043, - DATE: 1082, - TIME: 1083, - TIMESTAMP: 1114, - TIMESTAMPTZ: 1184, - INTERVAL: 1186, - TIMETZ: 1266, - BIT: 1560, - VARBIT: 1562, - NUMERIC: 1700, - REFCURSOR: 1790, - REGPROCEDURE: 2202, - REGOPER: 2203, - REGOPERATOR: 2204, - REGCLASS: 2205, - REGTYPE: 2206, - UUID: 2950, - TXID_SNAPSHOT: 2970, - PG_LSN: 3220, - PG_NDISTINCT: 3361, - PG_DEPENDENCIES: 3402, - 
TSVECTOR: 3614, - TSQUERY: 3615, - GTSVECTOR: 3642, - REGCONFIG: 3734, - REGDICTIONARY: 3769, - JSONB: 3802, - REGNAMESPACE: 4089, - REGROLE: 4096 -}; + prepareValue: function prepareValueWrapper(value) { + // this ensures that extra arguments do not get passed into prepareValue + // by accident, eg: from calling values.map(utils.prepareValue) + return prepareValue(value) + }, + normalizeQueryConfig, + postgresMd5PasswordHash, + md5, +} /***/ }), -/***/ 50977: +/***/ 16926: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var array = __nccwpck_require__(34702) -var arrayParser = __nccwpck_require__(83614); -var parseDate = __nccwpck_require__(13264); -var parseInterval = __nccwpck_require__(19991); -var parseByteA = __nccwpck_require__(79377); +"use strict"; -function allowNull (fn) { - return function nullAllowed (value) { - if (value === null) return value - return fn(value) - } -} -function parseBool (value) { - if (value === null) return value - return value === 'TRUE' || - value === 't' || - value === 'true' || - value === 'y' || - value === 'yes' || - value === 'on' || - value === '1'; -} +var path = __nccwpck_require__(85622) + , Stream = __nccwpck_require__(92413).Stream + , split = __nccwpck_require__(15000) + , util = __nccwpck_require__(31669) + , defaultPort = 5432 + , isWin = (process.platform === 'win32') + , warnStream = process.stderr +; -function parseBoolArray (value) { - if (!value) return null - return array.parse(value, parseBool) -} -function parseBaseTenInt (string) { - return parseInt(string, 10) +var S_IRWXG = 56 // 00070(8) + , S_IRWXO = 7 // 00007(8) + , S_IFMT = 61440 // 00170000(8) + , S_IFREG = 32768 // 0100000(8) +; +function isRegFile(mode) { + return ((mode & S_IFMT) == S_IFREG); } -function parseIntegerArray (value) { - if (!value) return null - return array.parse(value, allowNull(parseBaseTenInt)) -} +var fieldNames = [ 'host', 'port', 'database', 'user', 'password' ]; +var nrOfFields = fieldNames.length; +var passKey = fieldNames[ nrOfFields -1 ]; -function parseBigIntegerArray (value) { - if (!value) return null - return array.parse(value, allowNull(function (entry) { - return parseBigInteger(entry).trim() - })) -} -var parsePointArray = function(value) { - if(!value) { return null; } - var p = arrayParser.create(value, function(entry) { - if(entry !== null) { - entry = parsePoint(entry); +function warn() { + var isWritable = ( + warnStream instanceof Stream && + true === warnStream.writable + ); + + if (isWritable) { + var args = Array.prototype.slice.call(arguments).concat("\n"); + warnStream.write( util.format.apply(util, args) ); } - return entry; - }); +} - return p.parse(); -}; -var parseFloatArray = function(value) { - if(!value) { return null; } - var p = arrayParser.create(value, function(entry) { - if(entry !== null) { - entry = parseFloat(entry); +Object.defineProperty(module.exports, "isWin", ({ + get : function() { + return isWin; + } , + set : function(val) { + isWin = val; } - return entry; - }); +})); - return p.parse(); -}; -var parseStringArray = function(value) { - if(!value) { return null; } +module.exports.warnTo = function(stream) { + var old = warnStream; + warnStream = stream; + return old; +}; - var p = arrayParser.create(value); - return p.parse(); +module.exports.getFileName = function(rawEnv){ + var env = rawEnv || process.env; + var file = env.PGPASSFILE || ( + isWin ? 
+ path.join( env.APPDATA || './' , 'postgresql', 'pgpass.conf' ) : + path.join( env.HOME || './', '.pgpass' ) + ); + return file; }; -var parseDateArray = function(value) { - if (!value) { return null; } +module.exports.usePgPass = function(stats, fname) { + if (Object.prototype.hasOwnProperty.call(process.env, 'PGPASSWORD')) { + return false; + } - var p = arrayParser.create(value, function(entry) { - if (entry !== null) { - entry = parseDate(entry); + if (isWin) { + return true; } - return entry; - }); - return p.parse(); -}; + fname = fname || ''; -var parseIntervalArray = function(value) { - if (!value) { return null; } + if (! isRegFile(stats.mode)) { + warn('WARNING: password file "%s" is not a plain file', fname); + return false; + } - var p = arrayParser.create(value, function(entry) { - if (entry !== null) { - entry = parseInterval(entry); + if (stats.mode & (S_IRWXG | S_IRWXO)) { + /* If password file is insecure, alert the user and ignore it. */ + warn('WARNING: password file "%s" has group or world access; permissions should be u=rw (0600) or less', fname); + return false; } - return entry; - }); - return p.parse(); + return true; }; -var parseByteAArray = function(value) { - if (!value) { return null; } - return array.parse(value, allowNull(parseByteA)); +var matcher = module.exports.match = function(connInfo, entry) { + return fieldNames.slice(0, -1).reduce(function(prev, field, idx){ + if (idx == 1) { + // the port + if ( Number( connInfo[field] || defaultPort ) === Number( entry[field] ) ) { + return prev && true; + } + } + return prev && ( + entry[field] === '*' || + entry[field] === connInfo[field] + ); + }, true); }; -var parseInteger = function(value) { - return parseInt(value, 10); -}; -var parseBigInteger = function(value) { - var valStr = String(value); - if (/^\d+$/.test(valStr)) { return valStr; } - return value; -}; +module.exports.getPassword = function(connInfo, stream, cb) { + var pass; + var lineStream = stream.pipe(split()); -var parseJsonArray = function(value) { - if (!value) { return null; } + function onLine(line) { + var entry = parseLine(line); + if (entry && isValidEntry(entry) && matcher(connInfo, entry)) { + pass = entry[passKey]; + lineStream.end(); // -> calls onEnd(), but pass is set now + } + } - return array.parse(value, allowNull(JSON.parse)); -}; + var onEnd = function() { + stream.destroy(); + cb(pass); + }; -var parsePoint = function(value) { - if (value[0] !== '(') { return null; } + var onErr = function(err) { + stream.destroy(); + warn('WARNING: error on reading file: %s', err); + cb(undefined); + }; - value = value.substring( 1, value.length - 1 ).split(','); + stream.on('error', onErr); + lineStream + .on('data', onLine) + .on('end', onEnd) + .on('error', onErr) + ; - return { - x: parseFloat(value[0]) - , y: parseFloat(value[1]) - }; }; -var parseCircle = function(value) { - if (value[0] !== '<' && value[1] !== '(') { return null; } - var point = '('; - var radius = ''; - var pointParsed = false; - for (var i = 2; i < value.length - 1; i++){ - if (!pointParsed) { - point += value[i]; +var parseLine = module.exports.parseLine = function(line) { + if (line.length < 11 || line.match(/^\s+#/)) { + return null; } - if (value[i] === ')') { - pointParsed = true; - continue; - } else if (!pointParsed) { - continue; - } + var curChar = ''; + var prevChar = ''; + var fieldIdx = 0; + var startIdx = 0; + var endIdx = 0; + var obj = {}; + var isLastField = false; + var addToObj = function(idx, i0, i1) { + var field = line.substring(i0, i1); - if 
(value[i] === ','){ - continue; - } + if (! Object.hasOwnProperty.call(process.env, 'PGPASS_NO_DEESCAPE')) { + field = field.replace(/\\([:\\])/g, '$1'); + } - radius += value[i]; - } - var result = parsePoint(point); - result.radius = parseFloat(radius); + obj[ fieldNames[idx] ] = field; + }; - return result; -}; + for (var i = 0 ; i < line.length-1 ; i += 1) { + curChar = line.charAt(i+1); + prevChar = line.charAt(i); -var init = function(register) { - register(20, parseBigInteger); // int8 - register(21, parseInteger); // int2 - register(23, parseInteger); // int4 - register(26, parseInteger); // oid - register(700, parseFloat); // float4/real - register(701, parseFloat); // float8/double - register(16, parseBool); - register(1082, parseDate); // date - register(1114, parseDate); // timestamp without timezone - register(1184, parseDate); // timestamp - register(600, parsePoint); // point - register(651, parseStringArray); // cidr[] - register(718, parseCircle); // circle - register(1000, parseBoolArray); - register(1001, parseByteAArray); - register(1005, parseIntegerArray); // _int2 - register(1007, parseIntegerArray); // _int4 - register(1028, parseIntegerArray); // oid[] - register(1016, parseBigIntegerArray); // _int8 - register(1017, parsePointArray); // point[] - register(1021, parseFloatArray); // _float4 - register(1022, parseFloatArray); // _float8 - register(1231, parseFloatArray); // _numeric - register(1014, parseStringArray); //char - register(1015, parseStringArray); //varchar - register(1008, parseStringArray); - register(1009, parseStringArray); - register(1040, parseStringArray); // macaddr[] - register(1041, parseStringArray); // inet[] - register(1115, parseDateArray); // timestamp without time zone[] - register(1182, parseDateArray); // _date - register(1185, parseDateArray); // timestamp with time zone[] - register(1186, parseInterval); - register(1187, parseIntervalArray); - register(17, parseByteA); - register(114, JSON.parse.bind(JSON)); // json - register(3802, JSON.parse.bind(JSON)); // jsonb - register(199, parseJsonArray); // json[] - register(3807, parseJsonArray); // jsonb[] - register(3907, parseStringArray); // numrange[] - register(2951, parseStringArray); // uuid[] - register(791, parseStringArray); // money[] - register(1183, parseStringArray); // time[] - register(1270, parseStringArray); // timetz[] -}; + isLastField = (fieldIdx == nrOfFields-1); -module.exports = { - init: init -}; + if (isLastField) { + addToObj(fieldIdx, startIdx); + break; + } + if (i >= 0 && curChar == ':' && prevChar !== '\\') { + addToObj(fieldIdx, startIdx, i+1); -/***/ }), + startIdx = i+2; + fieldIdx += 1; + } + } -/***/ 37143: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + obj = ( Object.keys(obj).length === nrOfFields ) ? 
obj : null; -"use strict"; + return obj; +}; -var EventEmitter = __nccwpck_require__(28614).EventEmitter -var util = __nccwpck_require__(31669) -var utils = __nccwpck_require__(36419) -var sasl = __nccwpck_require__(19481) -var pgPass = __nccwpck_require__(19713) -var TypeOverrides = __nccwpck_require__(78873) +var isValidEntry = module.exports.isValidEntry = function(entry){ + var rules = { + // host + 0 : function(x){ + return x.length > 0; + } , + // port + 1 : function(x){ + if (x === '*') { + return true; + } + x = Number(x); + return ( + isFinite(x) && + x > 0 && + x < 9007199254740992 && + Math.floor(x) === x + ); + } , + // database + 2 : function(x){ + return x.length > 0; + } , + // username + 3 : function(x){ + return x.length > 0; + } , + // password + 4 : function(x){ + return x.length > 0; + } + }; -var ConnectionParameters = __nccwpck_require__(19112) -var Query = __nccwpck_require__(31283) -var defaults = __nccwpck_require__(26419) -var Connection = __nccwpck_require__(62848) + for (var idx = 0 ; idx < fieldNames.length ; idx += 1) { + var rule = rules[idx]; + var value = entry[ fieldNames[idx] ] || ''; -class Client extends EventEmitter { - constructor(config) { - super() + var res = rule(value); + if (!res) { + return false; + } + } - this.connectionParameters = new ConnectionParameters(config) - this.user = this.connectionParameters.user - this.database = this.connectionParameters.database - this.port = this.connectionParameters.port - this.host = this.connectionParameters.host + return true; +}; - // "hiding" the password so it doesn't show up in stack traces - // or if the client is console.logged - Object.defineProperty(this, 'password', { - configurable: true, - enumerable: false, - writable: true, - value: this.connectionParameters.password, - }) - this.replication = this.connectionParameters.replication - var c = config || {} +/***/ }), - this._Promise = c.Promise || global.Promise - this._types = new TypeOverrides(c.types) - this._ending = false - this._connecting = false - this._connected = false - this._connectionError = false - this._queryable = true +/***/ 19713: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.connection = - c.connection || - new Connection({ - stream: c.stream, - ssl: this.connectionParameters.ssl, - keepAlive: c.keepAlive || false, - keepAliveInitialDelayMillis: c.keepAliveInitialDelayMillis || 0, - encoding: this.connectionParameters.client_encoding || 'utf8', - }) - this.queryQueue = [] - this.binary = c.binary || defaults.binary - this.processID = null - this.secretKey = null - this.ssl = this.connectionParameters.ssl || false - // As with Password, make SSL->Key (the private key) non-enumerable. 
- // It won't show up in stack traces - // or if the client is console.logged - if (this.ssl && this.ssl.key) { - Object.defineProperty(this.ssl, 'key', { - enumerable: false, - }) - } +"use strict"; - this._connectionTimeoutMillis = c.connectionTimeoutMillis || 0 - } - _errorAllQueries(err) { - const enqueueError = (query) => { - process.nextTick(() => { - query.handleError(err, this.connection) - }) - } +var path = __nccwpck_require__(85622) + , fs = __nccwpck_require__(35747) + , helper = __nccwpck_require__(16926) +; - if (this.activeQuery) { - enqueueError(this.activeQuery) - this.activeQuery = null - } - this.queryQueue.forEach(enqueueError) - this.queryQueue.length = 0 - } +module.exports = function(connInfo, cb) { + var file = helper.getFileName(); + + fs.stat(file, function(err, stat){ + if (err || !helper.usePgPass(stat, file)) { + return cb(undefined); + } - _connect(callback) { - var self = this - var con = this.connection - this._connectionCallback = callback + var st = fs.createReadStream(file); - if (this._connecting || this._connected) { - const err = new Error('Client has already been connected. You cannot reuse a client.') - process.nextTick(() => { - callback(err) - }) - return - } - this._connecting = true + helper.getPassword(connInfo, st, cb); + }); +}; - this.connectionTimeoutHandle - if (this._connectionTimeoutMillis > 0) { - this.connectionTimeoutHandle = setTimeout(() => { - con._ending = true - con.stream.destroy(new Error('timeout expired')) - }, this._connectionTimeoutMillis) - } +module.exports.warnTo = helper.warnTo; - if (this.host && this.host.indexOf('/') === 0) { - con.connect(this.host + '/.s.PGSQL.' + this.port) - } else { - con.connect(this.port, this.host) - } - // once connection is established send startup message - con.on('connect', function () { - if (self.ssl) { - con.requestSsl() - } else { - con.startup(self.getStartupConf()) - } - }) +/***/ }), - con.on('sslconnect', function () { - con.startup(self.getStartupConf()) - }) +/***/ 34702: +/***/ ((__unused_webpack_module, exports) => { - this._attachListeners(con) +"use strict"; - con.once('end', () => { - const error = this._ending ? new Error('Connection terminated') : new Error('Connection terminated unexpectedly') - clearTimeout(this.connectionTimeoutHandle) - this._errorAllQueries(error) +exports.parse = function (source, transform) { + return new ArrayParser(source, transform).parse() +} - if (!this._ending) { - // if the connection is ended without us calling .end() - // on this client then we have an unexpected disconnection - // treat this as an error unless we've already emitted an error - // during connection. 
- if (this._connecting && !this._connectionError) { - if (this._connectionCallback) { - this._connectionCallback(error) - } else { - this._handleErrorEvent(error) - } - } else if (!this._connectionError) { - this._handleErrorEvent(error) - } - } +class ArrayParser { + constructor (source, transform) { + this.source = source + this.transform = transform || identity + this.position = 0 + this.entries = [] + this.recorded = [] + this.dimension = 0 + } - process.nextTick(() => { - this.emit('end') - }) - }) + isEof () { + return this.position >= this.source.length } - connect(callback) { - if (callback) { - this._connect(callback) - return + nextCharacter () { + var character = this.source[this.position++] + if (character === '\\') { + return { + value: this.source[this.position++], + escaped: true + } + } + return { + value: character, + escaped: false } - - return new this._Promise((resolve, reject) => { - this._connect((error) => { - if (error) { - reject(error) - } else { - resolve() - } - }) - }) } - _attachListeners(con) { - // password request handling - con.on('authenticationCleartextPassword', this._handleAuthCleartextPassword.bind(this)) - // password request handling - con.on('authenticationMD5Password', this._handleAuthMD5Password.bind(this)) - // password request handling (SASL) - con.on('authenticationSASL', this._handleAuthSASL.bind(this)) - con.on('authenticationSASLContinue', this._handleAuthSASLContinue.bind(this)) - con.on('authenticationSASLFinal', this._handleAuthSASLFinal.bind(this)) - con.on('backendKeyData', this._handleBackendKeyData.bind(this)) - con.on('error', this._handleErrorEvent.bind(this)) - con.on('errorMessage', this._handleErrorMessage.bind(this)) - con.on('readyForQuery', this._handleReadyForQuery.bind(this)) - con.on('notice', this._handleNotice.bind(this)) - con.on('rowDescription', this._handleRowDescription.bind(this)) - con.on('dataRow', this._handleDataRow.bind(this)) - con.on('portalSuspended', this._handlePortalSuspended.bind(this)) - con.on('emptyQuery', this._handleEmptyQuery.bind(this)) - con.on('commandComplete', this._handleCommandComplete.bind(this)) - con.on('parseComplete', this._handleParseComplete.bind(this)) - con.on('copyInResponse', this._handleCopyInResponse.bind(this)) - con.on('copyData', this._handleCopyData.bind(this)) - con.on('notification', this._handleNotification.bind(this)) + record (character) { + this.recorded.push(character) } - // TODO(bmc): deprecate pgpass "built in" integration since this.password can be a function - // it can be supplied by the user if required - this is a breaking change! 
- _checkPgPass(cb) { - const con = this.connection - if (typeof this.password === 'function') { - this._Promise - .resolve() - .then(() => this.password()) - .then((pass) => { - if (pass !== undefined) { - if (typeof pass !== 'string') { - con.emit('error', new TypeError('Password must be a string')) - return - } - this.connectionParameters.password = this.password = pass - } else { - this.connectionParameters.password = this.password = null - } - cb() - }) - .catch((err) => { - con.emit('error', err) - }) - } else if (this.password !== null) { - cb() - } else { - pgPass(this.connectionParameters, (pass) => { - if (undefined !== pass) { - this.connectionParameters.password = this.password = pass - } - cb() - }) + newEntry (includeEmpty) { + var entry + if (this.recorded.length > 0 || includeEmpty) { + entry = this.recorded.join('') + if (entry === 'NULL' && !includeEmpty) { + entry = null + } + if (entry !== null) entry = this.transform(entry) + this.entries.push(entry) + this.recorded = [] } } - _handleAuthCleartextPassword(msg) { - this._checkPgPass(() => { - this.connection.password(this.password) - }) + consumeDimensions () { + if (this.source[0] === '[') { + while (!this.isEof()) { + var char = this.nextCharacter() + if (char.value === '=') break + } + } } - _handleAuthMD5Password(msg) { - this._checkPgPass(() => { - const hashedPassword = utils.postgresMd5PasswordHash(this.user, this.password, msg.salt) - this.connection.password(hashedPassword) - }) + parse (nested) { + var character, parser, quote + this.consumeDimensions() + while (!this.isEof()) { + character = this.nextCharacter() + if (character.value === '{' && !quote) { + this.dimension++ + if (this.dimension > 1) { + parser = new ArrayParser(this.source.substr(this.position - 1), this.transform) + this.entries.push(parser.parse(true)) + this.position += parser.position - 2 + } + } else if (character.value === '}' && !quote) { + this.dimension-- + if (!this.dimension) { + this.newEntry() + if (nested) return this.entries + } + } else if (character.value === '"' && !character.escaped) { + if (quote) this.newEntry(true) + quote = !quote + } else if (character.value === ',' && !quote) { + this.newEntry() + } else { + this.record(character.value) + } + } + if (this.dimension !== 0) { + throw new Error('array dimension not balanced') + } + return this.entries } +} - _handleAuthSASL(msg) { - this._checkPgPass(() => { - this.saslSession = sasl.startSession(msg.mechanisms) - this.connection.sendSASLInitialResponseMessage(this.saslSession.mechanism, this.saslSession.response) - }) - } +function identity (value) { + return value +} - _handleAuthSASLContinue(msg) { - sasl.continueSession(this.saslSession, this.password, msg.data) - this.connection.sendSCRAMClientFinalMessage(this.saslSession.response) - } - _handleAuthSASLFinal(msg) { - sasl.finalizeSession(this.saslSession, msg.data) - this.saslSession = null - } +/***/ }), - _handleBackendKeyData(msg) { - this.processID = msg.processID - this.secretKey = msg.secretKey - } +/***/ 79377: +/***/ ((module) => { - _handleReadyForQuery(msg) { - if (this._connecting) { - this._connecting = false - this._connected = true - clearTimeout(this.connectionTimeoutHandle) +"use strict"; - // process possible callback argument to Client#connect - if (this._connectionCallback) { - this._connectionCallback(null, this) - // remove callback for proper error handling - // after the connect event - this._connectionCallback = null - } - this.emit('connect') - } - const { activeQuery } = this - 
this.activeQuery = null - this.readyForQuery = true - if (activeQuery) { - activeQuery.handleReadyForQuery(this.connection) - } - this._pulseQueryQueue() - } - // if we receieve an error event or error message - // during the connection process we handle it here - _handleErrorWhileConnecting(err) { - if (this._connectionError) { - // TODO(bmc): this is swallowing errors - we shouldn't do this - return - } - this._connectionError = true - clearTimeout(this.connectionTimeoutHandle) - if (this._connectionCallback) { - return this._connectionCallback(err) +module.exports = function parseBytea (input) { + if (/^\\x/.test(input)) { + // new 'hex' style response (pg >9.0) + return new Buffer(input.substr(2), 'hex') + } + var output = '' + var i = 0 + while (i < input.length) { + if (input[i] !== '\\') { + output += input[i] + ++i + } else { + if (/[0-7]{3}/.test(input.substr(i + 1, 3))) { + output += String.fromCharCode(parseInt(input.substr(i + 1, 3), 8)) + i += 4 + } else { + var backslashes = 1 + while (i + backslashes < input.length && input[i + backslashes] === '\\') { + backslashes++ + } + for (var k = 0; k < Math.floor(backslashes / 2); ++k) { + output += '\\' + } + i += Math.floor(backslashes / 2) * 2 + } } - this.emit('error', err) } + return new Buffer(output, 'binary') +} - // if we're connected and we receive an error event from the connection - // this means the socket is dead - do a hard abort of all queries and emit - // the socket error on the client as well - _handleErrorEvent(err) { - if (this._connecting) { - return this._handleErrorWhileConnecting(err) - } - this._queryable = false - this._errorAllQueries(err) - this.emit('error', err) - } - // handle error messages from the postgres backend - _handleErrorMessage(msg) { - if (this._connecting) { - return this._handleErrorWhileConnecting(msg) - } - const activeQuery = this.activeQuery +/***/ }), - if (!activeQuery) { - this._handleErrorEvent(msg) - return - } +/***/ 13264: +/***/ ((module) => { - this.activeQuery = null - activeQuery.handleError(msg, this.connection) - } +"use strict"; - _handleRowDescription(msg) { - // delegate rowDescription to active query - this.activeQuery.handleRowDescription(msg) - } - _handleDataRow(msg) { - // delegate dataRow to active query - this.activeQuery.handleDataRow(msg) - } +var DATE_TIME = /(\d{1,})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?.*?( BC)?$/ +var DATE = /^(\d{1,})-(\d{2})-(\d{2})( BC)?$/ +var TIME_ZONE = /([Z+-])(\d{2})?:?(\d{2})?:?(\d{2})?/ +var INFINITY = /^-?infinity$/ - _handlePortalSuspended(msg) { - // delegate portalSuspended to active query - this.activeQuery.handlePortalSuspended(this.connection) +module.exports = function parseDate (isoDate) { + if (INFINITY.test(isoDate)) { + // Capitalize to Infinity before passing to Number + return Number(isoDate.replace('i', 'I')) } + var matches = DATE_TIME.exec(isoDate) - _handleEmptyQuery(msg) { - // delegate emptyQuery to active query - this.activeQuery.handleEmptyQuery(this.connection) + if (!matches) { + // Force YYYY-MM-DD dates to be parsed as local time + return getDate(isoDate) || null } - _handleCommandComplete(msg) { - // delegate commandComplete to active query - this.activeQuery.handleCommandComplete(msg, this.connection) + var isBC = !!matches[8] + var year = parseInt(matches[1], 10) + if (isBC) { + year = bcYearToNegativeYear(year) } - _handleParseComplete(msg) { - // if a prepared statement has a name and properly parses - // we track that its already been executed so we don't parse - // it again on the 
same client - if (this.activeQuery.name) { - this.connection.parsedStatements[this.activeQuery.name] = this.activeQuery.text + var month = parseInt(matches[2], 10) - 1 + var day = matches[3] + var hour = parseInt(matches[4], 10) + var minute = parseInt(matches[5], 10) + var second = parseInt(matches[6], 10) + + var ms = matches[7] + ms = ms ? 1000 * parseFloat(ms) : 0 + + var date + var offset = timeZoneOffset(isoDate) + if (offset != null) { + date = new Date(Date.UTC(year, month, day, hour, minute, second, ms)) + + // Account for years from 0 to 99 being interpreted as 1900-1999 + // by Date.UTC / the multi-argument form of the Date constructor + if (is0To99(year)) { + date.setUTCFullYear(year) } - } - _handleCopyInResponse(msg) { - this.activeQuery.handleCopyInResponse(this.connection) + if (offset !== 0) { + date.setTime(date.getTime() - offset) + } + } else { + date = new Date(year, month, day, hour, minute, second, ms) + + if (is0To99(year)) { + date.setFullYear(year) + } } - _handleCopyData(msg) { - this.activeQuery.handleCopyData(msg, this.connection) + return date +} + +function getDate (isoDate) { + var matches = DATE.exec(isoDate) + if (!matches) { + return } - _handleNotification(msg) { - this.emit('notification', msg) + var year = parseInt(matches[1], 10) + var isBC = !!matches[4] + if (isBC) { + year = bcYearToNegativeYear(year) } - _handleNotice(msg) { - this.emit('notice', msg) + var month = parseInt(matches[2], 10) - 1 + var day = matches[3] + // YYYY-MM-DD will be parsed as local time + var date = new Date(year, month, day) + + if (is0To99(year)) { + date.setFullYear(year) } - getStartupConf() { - var params = this.connectionParameters + return date +} - var data = { - user: params.user, - database: params.database, - } +// match timezones: +// Z (UTC) +// -05 +// +06:30 +function timeZoneOffset (isoDate) { + if (isoDate.endsWith('+00')) { + return 0 + } - var appName = params.application_name || params.fallback_application_name - if (appName) { - data.application_name = appName - } - if (params.replication) { - data.replication = '' + params.replication - } - if (params.statement_timeout) { - data.statement_timeout = String(parseInt(params.statement_timeout, 10)) - } - if (params.idle_in_transaction_session_timeout) { - data.idle_in_transaction_session_timeout = String(parseInt(params.idle_in_transaction_session_timeout, 10)) - } - if (params.options) { - data.options = params.options - } + var zone = TIME_ZONE.exec(isoDate.split(' ')[1]) + if (!zone) return + var type = zone[1] - return data + if (type === 'Z') { + return 0 } + var sign = type === '-' ? -1 : 1 + var offset = parseInt(zone[2], 10) * 3600 + + parseInt(zone[3] || 0, 10) * 60 + + parseInt(zone[4] || 0, 10) - cancel(client, query) { - if (client.activeQuery === query) { - var con = this.connection + return offset * sign * 1000 +} - if (this.host && this.host.indexOf('/') === 0) { - con.connect(this.host + '/.s.PGSQL.' 
+ this.port) - } else { - con.connect(this.port, this.host) - } +function bcYearToNegativeYear (year) { + // Account for numerical difference between representations of BC years + // See: https://github.com/bendrucker/postgres-date/issues/5 + return -(year - 1) +} - // once connection is established send cancel message - con.on('connect', function () { - con.cancel(client.processID, client.secretKey) - }) - } else if (client.queryQueue.indexOf(query) !== -1) { - client.queryQueue.splice(client.queryQueue.indexOf(query), 1) - } - } +function is0To99 (num) { + return num >= 0 && num < 100 +} - setTypeParser(oid, format, parseFn) { - return this._types.setTypeParser(oid, format, parseFn) - } - getTypeParser(oid, format) { - return this._types.getTypeParser(oid, format) - } +/***/ }), - // Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c - escapeIdentifier(str) { - return '"' + str.replace(/"/g, '""') + '"' - } +/***/ 19991: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c - escapeLiteral(str) { - var hasBackslash = false - var escaped = "'" +"use strict"; - for (var i = 0; i < str.length; i++) { - var c = str[i] - if (c === "'") { - escaped += c + c - } else if (c === '\\') { - escaped += c + c - hasBackslash = true - } else { - escaped += c - } - } - escaped += "'" +var extend = __nccwpck_require__(98736) - if (hasBackslash === true) { - escaped = ' E' + escaped - } +module.exports = PostgresInterval - return escaped +function PostgresInterval (raw) { + if (!(this instanceof PostgresInterval)) { + return new PostgresInterval(raw) } + extend(this, parse(raw)) +} +var properties = ['seconds', 'minutes', 'hours', 'days', 'months', 'years'] +PostgresInterval.prototype.toPostgres = function () { + var filtered = properties.filter(this.hasOwnProperty, this) - _pulseQueryQueue() { - if (this.readyForQuery === true) { - this.activeQuery = this.queryQueue.shift() - if (this.activeQuery) { - this.readyForQuery = false - this.hasExecuted = true - - const queryError = this.activeQuery.submit(this.connection) - if (queryError) { - process.nextTick(() => { - this.activeQuery.handleError(queryError, this.connection) - this.readyForQuery = true - this._pulseQueryQueue() - }) - } - } else if (this.hasExecuted) { - this.activeQuery = null - this.emit('drain') - } - } + // In addition to `properties`, we need to account for fractions of seconds. + if (this.milliseconds && filtered.indexOf('seconds') < 0) { + filtered.push('seconds') } - query(config, values, callback) { - // can take in strings, config object or query object - var query - var result - var readTimeout - var readTimeoutTimer - var queryCallback + if (filtered.length === 0) return '0' + return filtered + .map(function (property) { + var value = this[property] || 0 - if (config === null || config === undefined) { - throw new TypeError('Client was passed a null or undefined query') - } else if (typeof config.submit === 'function') { - readTimeout = config.query_timeout || this.connectionParameters.query_timeout - result = query = config - if (typeof values === 'function') { - query.callback = query.callback || values - } - } else { - readTimeout = this.connectionParameters.query_timeout - query = new Query(config, values, callback) - if (!query.callback) { - result = new this._Promise((resolve, reject) => { - query.callback = (err, res) => (err ? 
reject(err) : resolve(res)) - }) + // Account for fractional part of seconds, + // remove trailing zeroes. + if (property === 'seconds' && this.milliseconds) { + value = (value + this.milliseconds / 1000).toFixed(6).replace(/\.?0+$/, '') } - } - if (readTimeout) { - queryCallback = query.callback + return value + ' ' + property + }, this) + .join(' ') +} - readTimeoutTimer = setTimeout(() => { - var error = new Error('Query read timeout') +var propertiesISOEquivalent = { + years: 'Y', + months: 'M', + days: 'D', + hours: 'H', + minutes: 'M', + seconds: 'S' +} +var dateProperties = ['years', 'months', 'days'] +var timeProperties = ['hours', 'minutes', 'seconds'] +// according to ISO 8601 +PostgresInterval.prototype.toISOString = PostgresInterval.prototype.toISO = function () { + var datePart = dateProperties + .map(buildProperty, this) + .join('') - process.nextTick(() => { - query.handleError(error, this.connection) - }) + var timePart = timeProperties + .map(buildProperty, this) + .join('') - queryCallback(error) + return 'P' + datePart + 'T' + timePart - // we already returned an error, - // just do nothing if query completes - query.callback = () => {} + function buildProperty (property) { + var value = this[property] || 0 - // Remove from queue - var index = this.queryQueue.indexOf(query) - if (index > -1) { - this.queryQueue.splice(index, 1) - } + // Account for fractional part of seconds, + // remove trailing zeroes. + if (property === 'seconds' && this.milliseconds) { + value = (value + this.milliseconds / 1000).toFixed(6).replace(/0+$/, '') + } - this._pulseQueryQueue() - }, readTimeout) + return value + propertiesISOEquivalent[property] + } +} - query.callback = (err, res) => { - clearTimeout(readTimeoutTimer) - queryCallback(err, res) - } - } +var NUMBER = '([+-]?\\d+)' +var YEAR = NUMBER + '\\s+years?' +var MONTH = NUMBER + '\\s+mons?' +var DAY = NUMBER + '\\s+days?' +var TIME = '([+-])?([\\d]*):(\\d\\d):(\\d\\d)\\.?(\\d{1,6})?' +var INTERVAL = new RegExp([YEAR, MONTH, DAY, TIME].map(function (regexString) { + return '(' + regexString + ')?' +}) + .join('\\s*')) - if (this.binary && !query.binary) { - query.binary = true - } +// Positions of values in regex match +var positions = { + years: 2, + months: 4, + days: 6, + hours: 9, + minutes: 10, + seconds: 11, + milliseconds: 12 +} +// We can use negative time +var negatives = ['hours', 'minutes', 'seconds', 'milliseconds'] - if (query._result && !query._result._types) { - query._result._types = this._types - } +function parseMilliseconds (fraction) { + // add omitted zeroes + var microseconds = fraction + '000000'.slice(fraction.length) + return parseInt(microseconds, 10) / 1000 +} - if (!this._queryable) { - process.nextTick(() => { - query.handleError(new Error('Client has encountered a connection error and is not queryable'), this.connection) - }) - return result - } +function parse (interval) { + if (!interval) return {} + var matches = INTERVAL.exec(interval) + var isNegative = matches[8] === '-' + return Object.keys(positions) + .reduce(function (parsed, property) { + var position = positions[property] + var value = matches[position] + // no empty string + if (!value) return parsed + // milliseconds are actually microseconds (up to 6 digits) + // with omitted trailing zeroes. + value = property === 'milliseconds' + ? 
parseMilliseconds(value) + : parseInt(value, 10) + // no zeros + if (!value) return parsed + if (isNegative && ~negatives.indexOf(property)) { + value *= -1 + } + parsed[property] = value + return parsed + }, {}) +} - if (this._ending) { - process.nextTick(() => { - query.handleError(new Error('Client was closed and is not queryable'), this.connection) - }) - return result - } - this.queryQueue.push(query) - this._pulseQueryQueue() - return result - } +/***/ }), - end(cb) { - this._ending = true +/***/ 47810: +/***/ ((module) => { - // if we have never connected, then end is a noop, callback immediately - if (!this.connection._connecting) { - if (cb) { - cb() - } else { - return this._Promise.resolve() - } - } +"use strict"; - if (this.activeQuery || !this._queryable) { - // if we have an active query we need to force a disconnect - // on the socket - otherwise a hung query could block end forever - this.connection.stream.destroy() - } else { - this.connection.end() - } - if (cb) { - this.connection.once('end', cb) - } else { - return new this._Promise((resolve) => { - this.connection.once('end', resolve) - }) +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); } } -// expose a Query constructor -Client.Query = Query - -module.exports = Client /***/ }), -/***/ 19112: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 29975: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ -var dns = __nccwpck_require__(40881) - -var defaults = __nccwpck_require__(26419) - -var parse = __nccwpck_require__(38961).parse // parses a connection string - -var val = function (key, config, envVar) { - if (envVar === undefined) { - envVar = process.env['PG' + key.toUpperCase()] - } else if (envVar === false) { - // do nothing ... 
use false - } else { - envVar = process.env[envVar] - } - return config[key] || envVar || defaults[key] -} +var Punycode = __nccwpck_require__(94213); -var readSSLConfigFromEnvironment = function () { - switch (process.env.PGSSLMODE) { - case 'disable': - return false - case 'prefer': - case 'require': - case 'verify-ca': - case 'verify-full': - return true - case 'no-verify': - return { rejectUnauthorized: false } - } - return defaults.ssl -} -// Convert arg to a string, surround in single quotes, and escape single quotes and backslashes -var quoteParamValue = function (value) { - return "'" + ('' + value).replace(/\\/g, '\\\\').replace(/'/g, "\\'") + "'" -} +var internals = {}; -var add = function (params, config, paramName) { - var value = config[paramName] - if (value !== undefined && value !== null) { - params.push(paramName + '=' + quoteParamValue(value)) - } -} -class ConnectionParameters { - constructor(config) { - // if a string is passed, it is a raw connection string so we parse it into a config - config = typeof config === 'string' ? parse(config) : config || {} +// +// Read rules from file. +// +internals.rules = __nccwpck_require__(2156).map(function (rule) { - // if the config has a connectionString defined, parse IT into the config we use - // this will override other default values with what is stored in connectionString - if (config.connectionString) { - config = Object.assign({}, config, parse(config.connectionString)) - } + return { + rule: rule, + suffix: rule.replace(/^(\*\.|\!)/, ''), + punySuffix: -1, + wildcard: rule.charAt(0) === '*', + exception: rule.charAt(0) === '!' + }; +}); - this.user = val('user', config) - this.database = val('database', config) - if (this.database === undefined) { - this.database = this.user - } +// +// Check is given string ends with `suffix`. +// +internals.endsWith = function (str, suffix) { - this.port = parseInt(val('port', config), 10) - this.host = val('host', config) + return str.indexOf(suffix, str.length - suffix.length) !== -1; +}; - // "hiding" the password so it doesn't show up in stack traces - // or if the client is console.logged - Object.defineProperty(this, 'password', { - configurable: true, - enumerable: false, - writable: true, - value: val('password', config), - }) - this.binary = val('binary', config) - this.options = val('options', config) +// +// Find rule for a given domain. +// +internals.findRule = function (domain) { - this.ssl = typeof config.ssl === 'undefined' ? readSSLConfigFromEnvironment() : config.ssl + var punyDomain = Punycode.toASCII(domain); + return internals.rules.reduce(function (memo, rule) { - if (typeof this.ssl === 'string') { - if (this.ssl === 'true') { - this.ssl = true - } - } - // support passing in ssl=no-verify via connection string - if (this.ssl === 'no-verify') { - this.ssl = { rejectUnauthorized: false } + if (rule.punySuffix === -1){ + rule.punySuffix = Punycode.toASCII(rule.suffix); } - if (this.ssl && this.ssl.key) { - Object.defineProperty(this.ssl, 'key', { - enumerable: false, - }) + if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { + return memo; } + // This has been commented out as it never seems to run. This is because + // sub tlds always appear after their parents and we never find a shorter + // match. 
+ //if (memo) { + // var memoSuffix = Punycode.toASCII(memo.suffix); + // if (memoSuffix.length >= punySuffix.length) { + // return memo; + // } + //} + return rule; + }, null); +}; - this.client_encoding = val('client_encoding', config) - this.replication = val('replication', config) - // a domain socket begins with '/' - this.isDomainSocket = !(this.host || '').indexOf('/') - this.application_name = val('application_name', config, 'PGAPPNAME') - this.fallback_application_name = val('fallback_application_name', config, false) - this.statement_timeout = val('statement_timeout', config, false) - this.idle_in_transaction_session_timeout = val('idle_in_transaction_session_timeout', config, false) - this.query_timeout = val('query_timeout', config, false) +// +// Error codes and messages. +// +exports.errorCodes = { + DOMAIN_TOO_SHORT: 'Domain name too short.', + DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', + LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', + LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', + LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', + LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', + LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' +}; - if (config.connectionTimeoutMillis === undefined) { - this.connect_timeout = process.env.PGCONNECT_TIMEOUT || 0 - } else { - this.connect_timeout = Math.floor(config.connectionTimeoutMillis / 1000) - } - if (config.keepAlive === false) { - this.keepalives = 0 - } else if (config.keepAlive === true) { - this.keepalives = 1 - } +// +// Validate domain name and throw if not valid. +// +// From wikipedia: +// +// Hostnames are composed of series of labels concatenated with dots, as are all +// domain names. Each label must be between 1 and 63 characters long, and the +// entire hostname (including the delimiting dots) has a maximum of 255 chars. +// +// Allowed chars: +// +// * `a-z` +// * `0-9` +// * `-` but not as a starting or ending character +// * `.` as a separator for the textual portions of a domain name +// +// * http://en.wikipedia.org/wiki/Domain_name +// * http://en.wikipedia.org/wiki/Hostname +// +internals.validate = function (input) { - if (typeof config.keepAliveInitialDelayMillis === 'number') { - this.keepalives_idle = Math.floor(config.keepAliveInitialDelayMillis / 1000) - } - } + // Before we can validate we need to take care of IDNs with unicode chars. + var ascii = Punycode.toASCII(input); - getLibpqConnectionString(cb) { - var params = [] - add(params, this, 'user') - add(params, this, 'password') - add(params, this, 'port') - add(params, this, 'application_name') - add(params, this, 'fallback_application_name') - add(params, this, 'connect_timeout') - add(params, this, 'options') + if (ascii.length < 1) { + return 'DOMAIN_TOO_SHORT'; + } + if (ascii.length > 255) { + return 'DOMAIN_TOO_LONG'; + } - var ssl = typeof this.ssl === 'object' ? this.ssl : this.ssl ? { sslmode: this.ssl } : {} - add(params, ssl, 'sslmode') - add(params, ssl, 'sslca') - add(params, ssl, 'sslkey') - add(params, ssl, 'sslcert') - add(params, ssl, 'sslrootcert') + // Check each part's length and allowed chars. 
+ var labels = ascii.split('.'); + var label; - if (this.database) { - params.push('dbname=' + quoteParamValue(this.database)) + for (var i = 0; i < labels.length; ++i) { + label = labels[i]; + if (!label.length) { + return 'LABEL_TOO_SHORT'; } - if (this.replication) { - params.push('replication=' + quoteParamValue(this.replication)) + if (label.length > 63) { + return 'LABEL_TOO_LONG'; } - if (this.host) { - params.push('host=' + quoteParamValue(this.host)) + if (label.charAt(0) === '-') { + return 'LABEL_STARTS_WITH_DASH'; } - if (this.isDomainSocket) { - return cb(null, params.join(' ')) + if (label.charAt(label.length - 1) === '-') { + return 'LABEL_ENDS_WITH_DASH'; } - if (this.client_encoding) { - params.push('client_encoding=' + quoteParamValue(this.client_encoding)) + if (!/^[a-z0-9\-]+$/.test(label)) { + return 'LABEL_INVALID_CHARS'; } - dns.lookup(this.host, function (err, address) { - if (err) return cb(err, null) - params.push('hostaddr=' + quoteParamValue(address)) - return cb(null, params.join(' ')) - }) } -} - -module.exports = ConnectionParameters - +}; -/***/ }), -/***/ 62848: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// +// Public API +// -"use strict"; +// +// Parse domain. +// +exports.parse = function (input) { -var net = __nccwpck_require__(11631) -var EventEmitter = __nccwpck_require__(28614).EventEmitter + if (typeof input !== 'string') { + throw new TypeError('Domain name must be a string.'); + } -const { parse, serialize } = __nccwpck_require__(21113) + // Force domain to lowercase. + var domain = input.slice(0).toLowerCase(); -const flushBuffer = serialize.flush() -const syncBuffer = serialize.sync() -const endBuffer = serialize.end() + // Handle FQDN. + // TODO: Simply remove trailing dot? + if (domain.charAt(domain.length - 1) === '.') { + domain = domain.slice(0, domain.length - 1); + } -// TODO(bmc) support binary mode at some point -class Connection extends EventEmitter { - constructor(config) { - super() - config = config || {} - this.stream = config.stream || new net.Socket() - this._keepAlive = config.keepAlive - this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis - this.lastBuffer = false - this.parsedStatements = {} - this.ssl = config.ssl || false - this._ending = false - this._emitMessage = false - var self = this - this.on('newListener', function (eventName) { - if (eventName === 'message') { - self._emitMessage = true + // Validate and sanitise input. 
+ var error = internals.validate(domain); + if (error) { + return { + input: input, + error: { + message: exports.errorCodes[error], + code: error } - }) + }; } - connect(port, host) { - var self = this - - this._connecting = true - this.stream.setNoDelay(true) - this.stream.connect(port, host) + var parsed = { + input: input, + tld: null, + sld: null, + domain: null, + subdomain: null, + listed: false + }; - this.stream.once('connect', function () { - if (self._keepAlive) { - self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis) - } - self.emit('connect') - }) + var domainParts = domain.split('.'); - const reportStreamError = function (error) { - // errors about disconnections should be ignored during disconnect - if (self._ending && (error.code === 'ECONNRESET' || error.code === 'EPIPE')) { - return - } - self.emit('error', error) - } - this.stream.on('error', reportStreamError) + // Non-Internet TLD + if (domainParts[domainParts.length - 1] === 'local') { + return parsed; + } - this.stream.on('close', function () { - self.emit('end') - }) + var handlePunycode = function () { - if (!this.ssl) { - return this.attachListeners(this.stream) + if (!/xn--/.test(domain)) { + return parsed; } + if (parsed.domain) { + parsed.domain = Punycode.toASCII(parsed.domain); + } + if (parsed.subdomain) { + parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } + return parsed; + }; - this.stream.once('data', function (buffer) { - var responseCode = buffer.toString('utf8') - switch (responseCode) { - case 'S': // Server supports SSL connections, continue with a secure connection - break - case 'N': // Server does not support SSL connections - self.stream.end() - return self.emit('error', new Error('The server does not support SSL connections')) - default: - // Any other response byte, including 'E' (ErrorResponse) indicating a server error - self.stream.end() - return self.emit('error', new Error('There was an error establishing an SSL connection')) - } - var tls = __nccwpck_require__(4016) - const options = { - socket: self.stream, - } - - if (self.ssl !== true) { - Object.assign(options, self.ssl) + var rule = internals.findRule(domain); - if ('key' in self.ssl) { - options.key = self.ssl.key - } - } + // Unlisted tld. + if (!rule) { + if (domainParts.length < 2) { + return parsed; + } + parsed.tld = domainParts.pop(); + parsed.sld = domainParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + if (domainParts.length) { + parsed.subdomain = domainParts.pop(); + } + return handlePunycode(); + } - if (net.isIP(host) === 0) { - options.servername = host - } - try { - self.stream = tls.connect(options) - } catch (err) { - return self.emit('error', err) - } - self.attachListeners(self.stream) - self.stream.on('error', reportStreamError) + // At this point we know the public suffix is listed. + parsed.listed = true; - self.emit('sslconnect') - }) - } + var tldParts = rule.suffix.split('.'); + var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); - attachListeners(stream) { - stream.on('end', () => { - this.emit('end') - }) - parse(stream, (msg) => { - var eventName = msg.name === 'error' ? 
'errorMessage' : msg.name - if (this._emitMessage) { - this.emit('message', msg) - } - this.emit(eventName, msg) - }) + if (rule.exception) { + privateParts.push(tldParts.shift()); } - requestSsl() { - this.stream.write(serialize.requestSsl()) - } + parsed.tld = tldParts.join('.'); - startup(config) { - this.stream.write(serialize.startup(config)) + if (!privateParts.length) { + return handlePunycode(); } - cancel(processID, secretKey) { - this._send(serialize.cancel(processID, secretKey)) + if (rule.wildcard) { + tldParts.unshift(privateParts.pop()); + parsed.tld = tldParts.join('.'); } - password(password) { - this._send(serialize.password(password)) + if (!privateParts.length) { + return handlePunycode(); } - sendSASLInitialResponseMessage(mechanism, initialResponse) { - this._send(serialize.sendSASLInitialResponseMessage(mechanism, initialResponse)) - } + parsed.sld = privateParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); - sendSCRAMClientFinalMessage(additionalData) { - this._send(serialize.sendSCRAMClientFinalMessage(additionalData)) + if (privateParts.length) { + parsed.subdomain = privateParts.join('.'); } - _send(buffer) { - if (!this.stream.writable) { - return false - } - return this.stream.write(buffer) - } + return handlePunycode(); +}; - query(text) { - this._send(serialize.query(text)) - } - // send parse message - parse(query) { - this._send(serialize.parse(query)) - } +// +// Get domain. +// +exports.get = function (domain) { - // send bind message - bind(config) { - this._send(serialize.bind(config)) + if (!domain) { + return null; } + return exports.parse(domain).domain || null; +}; - // send execute message - execute(config) { - this._send(serialize.execute(config)) - } - flush() { - if (this.stream.writable) { - this.stream.write(flushBuffer) - } - } +// +// Check whether domain belongs to a known public suffix. +// +exports.isValid = function (domain) { - sync() { - this._ending = true - this._send(flushBuffer) - this._send(syncBuffer) - } + var parsed = exports.parse(domain); + return Boolean(parsed.domain && parsed.listed); +}; - end() { - // 0x58 = 'X' - this._ending = true - if (!this._connecting || !this.stream.writable) { - this.stream.end() - return - } - return this.stream.write(endBuffer, () => { - this.stream.end() - }) - } - close(msg) { - this._send(serialize.close(msg)) - } +/***/ }), - describe(msg) { - this._send(serialize.describe(msg)) - } +/***/ 74907: +/***/ ((module) => { - sendCopyFromChunk(chunk) { - this._send(serialize.copyData(chunk)) - } +"use strict"; - endCopyFrom() { - this._send(serialize.copyDone()) - } - sendCopyFail(msg) { - this._send(serialize.copyFail(msg)) - } -} +var replace = String.prototype.replace; +var percentTwenties = /%20/g; -module.exports = Connection +module.exports = { + 'default': 'RFC3986', + formatters: { + RFC1738: function (value) { + return replace.call(value, percentTwenties, '+'); + }, + RFC3986: function (value) { + return value; + } + }, + RFC1738: 'RFC1738', + RFC3986: 'RFC3986' +}; /***/ }), -/***/ 26419: +/***/ 22760: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +var stringify = __nccwpck_require__(79954); +var parse = __nccwpck_require__(33912); +var formats = __nccwpck_require__(74907); + module.exports = { - // database host. defaults to localhost - host: 'localhost', + formats: formats, + parse: parse, + stringify: stringify +}; - // database user's name - user: process.platform === 'win32' ? 
process.env.USERNAME : process.env.USER, - // name of database to connect - database: undefined, +/***/ }), - // database user's password - password: null, +/***/ 33912: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // a Postgres connection string to be used instead of setting individual connection items - // NOTE: Setting this value will cause it to override any other value (such as database or user) defined - // in the defaults object. - connectionString: undefined, +"use strict"; - // database port - port: 5432, - // number of rows to return at a time from a prepared statement's - // portal. 0 will return all rows at once - rows: 0, +var utils = __nccwpck_require__(72360); - // binary result mode - binary: false, +var has = Object.prototype.hasOwnProperty; - // Connection pool options - see https://github.com/brianc/node-pg-pool +var defaults = { + allowDots: false, + allowPrototypes: false, + arrayLimit: 20, + decoder: utils.decode, + delimiter: '&', + depth: 5, + parameterLimit: 1000, + plainObjects: false, + strictNullHandling: false +}; - // number of connections to use in connection pool - // 0 will disable connection pooling - max: 10, +var parseValues = function parseQueryStringValues(str, options) { + var obj = {}; + var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; + var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit; + var parts = cleanStr.split(options.delimiter, limit); - // max milliseconds a client can go unused before it is removed - // from the pool and destroyed - idleTimeoutMillis: 30000, + for (var i = 0; i < parts.length; ++i) { + var part = parts[i]; - client_encoding: '', + var bracketEqualsPos = part.indexOf(']='); + var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; - ssl: false, + var key, val; + if (pos === -1) { + key = options.decoder(part, defaults.decoder); + val = options.strictNullHandling ? null : ''; + } else { + key = options.decoder(part.slice(0, pos), defaults.decoder); + val = options.decoder(part.slice(pos + 1), defaults.decoder); + } + if (has.call(obj, key)) { + obj[key] = [].concat(obj[key]).concat(val); + } else { + obj[key] = val; + } + } - application_name: undefined, + return obj; +}; - fallback_application_name: undefined, +var parseObject = function (chain, val, options) { + var leaf = val; - options: undefined, + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; - parseInputDatesAsUTC: false, + if (root === '[]') { + obj = []; + obj = obj.concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; + var index = parseInt(cleanRoot, 10); + if ( + !isNaN(index) + && root !== cleanRoot + && String(index) === cleanRoot + && index >= 0 + && (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = leaf; + } else { + obj[cleanRoot] = leaf; + } + } - // max milliseconds any query using this connection will execute for before timing out in error. 
- // false=unlimited - statement_timeout: false, + leaf = obj; + } - // Terminate any session with an open transaction that has been idle for longer than the specified duration in milliseconds - // false=unlimited - idle_in_transaction_session_timeout: false, + return leaf; +}; - // max milliseconds to wait for query to complete (client side) - query_timeout: false, +var parseKeys = function parseQueryStringKeys(givenKey, val, options) { + if (!givenKey) { + return; + } - connect_timeout: 0, + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; - keepalives: 1, + // The regex chunks - keepalives_idle: 0, -} + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; -var pgTypes = __nccwpck_require__(77929) -// save default parsers -var parseBigInteger = pgTypes.getTypeParser(20, 'text') -var parseBigIntegerArray = pgTypes.getTypeParser(1016, 'text') + // Get the parent -// parse int8 so you can get your count values as actual numbers -module.exports.__defineSetter__('parseInt8', function (val) { - pgTypes.setTypeParser(20, 'text', val ? pgTypes.getTypeParser(23, 'text') : parseBigInteger) - pgTypes.setTypeParser(1016, 'text', val ? pgTypes.getTypeParser(1007, 'text') : parseBigIntegerArray) -}) + var segment = brackets.exec(key); + var parent = segment ? key.slice(0, segment.index) : key; + // Stash the parent if it exists -/***/ }), + var keys = []; + if (parent) { + // If we aren't using plain objects, optionally prefix keys + // that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } -/***/ 44194: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + keys.push(parent); + } -"use strict"; + // Loop through children appending to the array until we hit depth + var i = 0; + while ((segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + keys.push(segment[1]); + } -var Client = __nccwpck_require__(37143) -var defaults = __nccwpck_require__(26419) -var Connection = __nccwpck_require__(62848) -var Pool = __nccwpck_require__(99599) -const { DatabaseError } = __nccwpck_require__(21113) + // If there's a remainder, just add whatever is left -const poolFactory = (Client) => { - return class BoundPool extends Pool { - constructor(options) { - super(options, Client) + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); } - } -} -var PG = function (clientConstructor) { - this.defaults = defaults - this.Client = clientConstructor - this.Query = this.Client.Query - this.Pool = poolFactory(this.Client) - this._pools = [] - this.Connection = Connection - this.types = __nccwpck_require__(77929) - this.DatabaseError = DatabaseError -} + return parseObject(keys, val, options); +}; -if (typeof process.env.NODE_PG_FORCE_NATIVE !== 'undefined') { - module.exports = new PG(__nccwpck_require__(43148)) -} else { - module.exports = new PG(Client) +module.exports = function (str, opts) { + var options = opts ? 
utils.assign({}, opts) : {}; - // lazy require native module...the native module may not have installed - Object.defineProperty(module.exports, "native", ({ - configurable: true, - enumerable: false, - get() { - var native = null - try { - native = new PG(__nccwpck_require__(43148)) - } catch (err) { - if (err.code !== 'MODULE_NOT_FOUND') { - throw err - } - } + if (options.decoder !== null && options.decoder !== undefined && typeof options.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } - // overwrite module.exports.native so that getter is never called again - Object.defineProperty(module.exports, "native", ({ - value: native, - })) + options.ignoreQueryPrefix = options.ignoreQueryPrefix === true; + options.delimiter = typeof options.delimiter === 'string' || utils.isRegExp(options.delimiter) ? options.delimiter : defaults.delimiter; + options.depth = typeof options.depth === 'number' ? options.depth : defaults.depth; + options.arrayLimit = typeof options.arrayLimit === 'number' ? options.arrayLimit : defaults.arrayLimit; + options.parseArrays = options.parseArrays !== false; + options.decoder = typeof options.decoder === 'function' ? options.decoder : defaults.decoder; + options.allowDots = typeof options.allowDots === 'boolean' ? options.allowDots : defaults.allowDots; + options.plainObjects = typeof options.plainObjects === 'boolean' ? options.plainObjects : defaults.plainObjects; + options.allowPrototypes = typeof options.allowPrototypes === 'boolean' ? options.allowPrototypes : defaults.allowPrototypes; + options.parameterLimit = typeof options.parameterLimit === 'number' ? options.parameterLimit : defaults.parameterLimit; + options.strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; - return native - }, - })) -} + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? 
Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = utils.merge(obj, newObj, options); + } + + return utils.compact(obj); +}; /***/ }), -/***/ 1094: +/***/ 79954: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// eslint-disable-next-line -var Native = __nccwpck_require__(38889) -var TypeOverrides = __nccwpck_require__(78873) -var pkg = __nccwpck_require__(60257) -var EventEmitter = __nccwpck_require__(28614).EventEmitter -var util = __nccwpck_require__(31669) -var ConnectionParameters = __nccwpck_require__(19112) +var utils = __nccwpck_require__(72360); +var formats = __nccwpck_require__(74907); -var NativeQuery = __nccwpck_require__(91886) +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { // eslint-disable-line func-name-matching + return prefix + '[]'; + }, + indices: function indices(prefix, key) { // eslint-disable-line func-name-matching + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { // eslint-disable-line func-name-matching + return prefix; + } +}; -var Client = (module.exports = function (config) { - EventEmitter.call(this) - config = config || {} +var toISO = Date.prototype.toISOString; - this._Promise = config.Promise || global.Promise - this._types = new TypeOverrides(config.types) +var defaults = { + delimiter: '&', + encode: true, + encoder: utils.encode, + encodeValuesOnly: false, + serializeDate: function serializeDate(date) { // eslint-disable-line func-name-matching + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; - this.native = new Native({ - types: this._types, - }) +var stringify = function stringify( // eslint-disable-line func-name-matching + object, + prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly +) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder) : prefix; + } - this._queryQueue = [] - this._ending = false - this._connecting = false - this._connected = false - this._queryable = true + obj = ''; + } - // keep these on the object for legacy reasons - // for the time being. TODO: deprecate all this jazz - var cp = (this.connectionParameters = new ConnectionParameters(config)) - this.user = cp.user + if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? 
prefix : encoder(prefix, defaults.encoder); + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } - // "hiding" the password so it doesn't show up in stack traces - // or if the client is console.logged - Object.defineProperty(this, 'password', { - configurable: true, - enumerable: false, - writable: true, - value: cp.password, - }) - this.database = cp.database - this.host = cp.host - this.port = cp.port + var values = []; - // a hash to hold named queries - this.namedQueries = {} -}) + if (typeof obj === 'undefined') { + return values; + } -Client.Query = NativeQuery + var objKeys; + if (Array.isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } -util.inherits(Client, EventEmitter) + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; -Client.prototype._errorAllQueries = function (err) { - const enqueueError = (query) => { - process.nextTick(() => { - query.native = this.native - query.handleError(err) - }) - } + if (skipNulls && obj[key] === null) { + continue; + } - if (this._hasActiveQuery()) { - enqueueError(this._activeQuery) - this._activeQuery = null - } + if (Array.isArray(obj)) { + values = values.concat(stringify( + obj[key], + generateArrayPrefix(prefix, key), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly + )); + } else { + values = values.concat(stringify( + obj[key], + prefix + (allowDots ? '.' + key : '[' + key + ']'), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly + )); + } + } - this._queryQueue.forEach(enqueueError) - this._queryQueue.length = 0 -} + return values; +}; -// connect to the backend -// pass an optional callback to be called once connected -// or with an error if there was a connection error -Client.prototype._connect = function (cb) { - var self = this +module.exports = function (object, opts) { + var obj = object; + var options = opts ? utils.assign({}, opts) : {}; - if (this._connecting) { - process.nextTick(() => cb(new Error('Client has already been connected. You cannot reuse a client.'))) - return - } + if (options.encoder !== null && options.encoder !== undefined && typeof options.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } - this._connecting = true + var delimiter = typeof options.delimiter === 'undefined' ? defaults.delimiter : options.delimiter; + var strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : defaults.skipNulls; + var encode = typeof options.encode === 'boolean' ? options.encode : defaults.encode; + var encoder = typeof options.encoder === 'function' ? options.encoder : defaults.encoder; + var sort = typeof options.sort === 'function' ? options.sort : null; + var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots; + var serializeDate = typeof options.serializeDate === 'function' ? options.serializeDate : defaults.serializeDate; + var encodeValuesOnly = typeof options.encodeValuesOnly === 'boolean' ? 
options.encodeValuesOnly : defaults.encodeValuesOnly; + if (typeof options.format === 'undefined') { + options.format = formats['default']; + } else if (!Object.prototype.hasOwnProperty.call(formats.formatters, options.format)) { + throw new TypeError('Unknown format option provided.'); + } + var formatter = formats.formatters[options.format]; + var objKeys; + var filter; - this.connectionParameters.getLibpqConnectionString(function (err, conString) { - if (err) return cb(err) - self.native.connect(conString, function (err) { - if (err) { - self.native.end() - return cb(err) - } + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (Array.isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } - // set internal states to connected - self._connected = true + var keys = []; - // handle connection errors from the native layer - self.native.on('error', function (err) { - self._queryable = false - self._errorAllQueries(err) - self.emit('error', err) - }) + if (typeof obj !== 'object' || obj === null) { + return ''; + } - self.native.on('notification', function (msg) { - self.emit('notification', { - channel: msg.relname, - payload: msg.extra, - }) - }) + var arrayFormat; + if (options.arrayFormat in arrayPrefixGenerators) { + arrayFormat = options.arrayFormat; + } else if ('indices' in options) { + arrayFormat = options.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } - // signal we are connected now - self.emit('connect') - self._pulseQueryQueue(true) + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; - cb() - }) - }) -} + if (!objKeys) { + objKeys = Object.keys(obj); + } -Client.prototype.connect = function (callback) { - if (callback) { - this._connect(callback) - return - } + if (sort) { + objKeys.sort(sort); + } - return new this._Promise((resolve, reject) => { - this._connect((error) => { - if (error) { - reject(error) - } else { - resolve() - } - }) - }) -} + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; -// send a query to the server -// this method is highly overloaded to take -// 1) string query, optional array of parameters, optional function callback -// 2) object query with { -// string query -// optional array values, -// optional function callback instead of as a separate parameter -// optional string name to name & cache the query plan -// optional string rowMode = 'array' for an array of results -// } -Client.prototype.query = function (config, values, callback) { - var query - var result - var readTimeout - var readTimeoutTimer - var queryCallback + if (skipNulls && obj[key] === null) { + continue; + } - if (config === null || config === undefined) { - throw new TypeError('Client was passed a null or undefined query') - } else if (typeof config.submit === 'function') { - readTimeout = config.query_timeout || this.connectionParameters.query_timeout - result = query = config - // accept query(new Query(...), (err, res) => { }) style - if (typeof values === 'function') { - config.callback = values - } - } else { - readTimeout = this.connectionParameters.query_timeout - query = new NativeQuery(config, values, callback) - if (!query.callback) { - let resolveOut, rejectOut - result = new this._Promise((resolve, reject) => { - resolveOut = resolve - rejectOut = reject - }) - query.callback = (err, res) => (err ? 
rejectOut(err) : resolveOut(res)) + keys = keys.concat(stringify( + obj[key], + key, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encode ? encoder : null, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly + )); } - } - if (readTimeout) { - queryCallback = query.callback + var joined = keys.join(delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; - readTimeoutTimer = setTimeout(() => { - var error = new Error('Query read timeout') + return joined.length > 0 ? prefix + joined : ''; +}; - process.nextTick(() => { - query.handleError(error, this.connection) - }) - queryCallback(error) +/***/ }), - // we already returned an error, - // just do nothing if query completes - query.callback = () => {} +/***/ 72360: +/***/ ((module) => { - // Remove from queue - var index = this._queryQueue.indexOf(query) - if (index > -1) { - this._queryQueue.splice(index, 1) - } +"use strict"; - this._pulseQueryQueue() - }, readTimeout) - query.callback = (err, res) => { - clearTimeout(readTimeoutTimer) - queryCallback(err, res) - } - } +var has = Object.prototype.hasOwnProperty; - if (!this._queryable) { - query.native = this.native - process.nextTick(() => { - query.handleError(new Error('Client has encountered a connection error and is not queryable')) - }) - return result - } +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } - if (this._ending) { - query.native = this.native - process.nextTick(() => { - query.handleError(new Error('Client was closed and is not queryable')) - }) - return result - } + return array; +}()); - this._queryQueue.push(query) - this._pulseQueryQueue() - return result -} +var compactQueue = function compactQueue(queue) { + var obj; -// disconnect from the backend server -Client.prototype.end = function (cb) { - var self = this + while (queue.length) { + var item = queue.pop(); + obj = item.obj[item.prop]; - this._ending = true + if (Array.isArray(obj)) { + var compacted = []; - if (!this._connected) { - this.once('connect', this.end.bind(this, cb)) - } - var result - if (!cb) { - result = new this._Promise(function (resolve, reject) { - cb = (err) => (err ? reject(err) : resolve()) - }) - } - this.native.end(function () { - self._errorAllQueries(new Error('Connection terminated')) + for (var j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } - process.nextTick(() => { - self.emit('end') - if (cb) cb() - }) - }) - return result -} + item.obj[item.prop] = compacted; + } + } -Client.prototype._hasActiveQuery = function () { - return this._activeQuery && this._activeQuery.state !== 'error' && this._activeQuery.state !== 'end' -} + return obj; +}; -Client.prototype._pulseQueryQueue = function (initialConnection) { - if (!this._connected) { - return - } - if (this._hasActiveQuery()) { - return - } - var query = this._queryQueue.shift() - if (!query) { - if (!initialConnection) { - this.emit('drain') +var arrayToObject = function arrayToObject(source, options) { + var obj = options && options.plainObjects ? 
Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } } - return - } - this._activeQuery = query - query.submit(this) - var self = this - query.once('_done', function () { - self._pulseQueryQueue() - }) -} -// attempt to cancel an in-progress query -Client.prototype.cancel = function (query) { - if (this._activeQuery === query) { - this.native.cancel(function () {}) - } else if (this._queryQueue.indexOf(query) !== -1) { - this._queryQueue.splice(this._queryQueue.indexOf(query), 1) - } -} + return obj; +}; -Client.prototype.setTypeParser = function (oid, format, parseFn) { - return this._types.setTypeParser(oid, format, parseFn) -} +var merge = function merge(target, source, options) { + if (!source) { + return target; + } -Client.prototype.getTypeParser = function (oid, format) { - return this._types.getTypeParser(oid, format) -} + if (typeof source !== 'object') { + if (Array.isArray(target)) { + target.push(source); + } else if (typeof target === 'object') { + if (options.plainObjects || options.allowPrototypes || !has.call(Object.prototype, source)) { + target[source] = true; + } + } else { + return [target, source]; + } + return target; + } -/***/ }), + if (typeof target !== 'object') { + return [target].concat(source); + } -/***/ 43148: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var mergeTarget = target; + if (Array.isArray(target) && !Array.isArray(source)) { + mergeTarget = arrayToObject(target, options); + } -"use strict"; + if (Array.isArray(target) && Array.isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + if (target[i] && typeof target[i] === 'object') { + target[i] = merge(target[i], item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } -module.exports = __nccwpck_require__(1094) + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; -/***/ }), +var assign = function assignSingleSource(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +}; -/***/ 91886: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var decode = function (str) { + try { + return decodeURIComponent(str.replace(/\+/g, ' ')); + } catch (e) { + return str; + } +}; -"use strict"; +var encode = function encode(str) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + var string = typeof str === 'string' ? str : String(str); -var EventEmitter = __nccwpck_require__(28614).EventEmitter -var util = __nccwpck_require__(31669) -var utils = __nccwpck_require__(36419) + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); -var NativeQuery = (module.exports = function (config, values, callback) { - EventEmitter.call(this) - config = utils.normalizeQueryConfig(config, values, callback) - this.text = config.text - this.values = config.values - this.name = config.name - this.callback = config.callback - this.state = 'new' - this._arrayMode = config.rowMode === 'array' + if ( + c === 0x2D // - + || c === 0x2E // . 
+ || c === 0x5F // _ + || c === 0x7E // ~ + || (c >= 0x30 && c <= 0x39) // 0-9 + || (c >= 0x41 && c <= 0x5A) // a-z + || (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } - // if the 'row' event is listened for - // then emit them as they come in - // without setting singleRowMode to true - // this has almost no meaning because libpq - // reads all rows into memory befor returning any - this._emitRowEvents = false - this.on( - 'newListener', - function (event) { - if (event === 'row') this._emitRowEvents = true - }.bind(this) - ) -}) + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } -util.inherits(NativeQuery, EventEmitter) + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } -var errorFieldMap = { - /* eslint-disable quote-props */ - sqlState: 'code', - statementPosition: 'position', - messagePrimary: 'message', - context: 'where', - schemaName: 'schema', - tableName: 'table', - columnName: 'column', - dataTypeName: 'dataType', - constraintName: 'constraint', - sourceFile: 'file', - sourceLine: 'line', - sourceFunction: 'routine', -} + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } -NativeQuery.prototype.handleError = function (err) { - // copy pq error fields into the error object - var fields = this.native.pq.resultErrorFields() - if (fields) { - for (var key in fields) { - var normalizedFieldName = errorFieldMap[key] || key - err[normalizedFieldName] = fields[key] + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3F)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; } - } - if (this.callback) { - this.callback(err) - } else { - this.emit('error', err) - } - this.state = 'error' -} -NativeQuery.prototype.then = function (onSuccess, onFailure) { - return this._getPromise().then(onSuccess, onFailure) -} + return out; +}; -NativeQuery.prototype.catch = function (callback) { - return this._getPromise().catch(callback) -} +var compact = function compact(value) { + var queue = [{ obj: { o: value }, prop: 'o' }]; + var refs = []; -NativeQuery.prototype._getPromise = function () { - if (this._promise) return this._promise - this._promise = new Promise( - function (resolve, reject) { - this._once('end', resolve) - this._once('error', reject) - }.bind(this) - ) - return this._promise -} + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; -NativeQuery.prototype.submit = function (client) { - this.state = 'running' - var self = this - this.native = client.native - client.native.arrayMode = this._arrayMode + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } - var after = function (err, rows, results) { - client.native.arrayMode = false - setImmediate(function () { - self.emit('_done') - }) + return compactQueue(queue); +}; - // handle possible query error - if (err) { - return self.handleError(err) - } +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; - // emit row events for each row in the result - if (self._emitRowEvents) { - if (results.length > 1) { - 
rows.forEach((rowOfRows, i) => { - rowOfRows.forEach((row) => { - self.emit('row', row, results[i]) - }) - }) - } else { - rows.forEach(function (row) { - self.emit('row', row, results) - }) - } +var isBuffer = function isBuffer(obj) { + if (obj === null || typeof obj === 'undefined') { + return false; } - // handle successful result - self.state = 'end' - self.emit('end', results) - if (self.callback) { - self.callback(null, results) - } - } + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; - if (process.domain) { - after = process.domain.bind(after) - } +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + merge: merge +}; - // named query - if (this.name) { - if (this.name.length > 63) { - /* eslint-disable no-console */ - console.error('Warning! Postgres only supports 63 characters for query names.') - console.error('You supplied %s (%s)', this.name, this.name.length) - console.error('This can cause conflicts and silent errors executing queries') - /* eslint-enable no-console */ - } - var values = (this.values || []).map(utils.prepareValue) - // check if the client has already executed this named query - // if so...just execute it again - skip the planning phase - if (client.namedQueries[this.name]) { - if (this.text && client.namedQueries[this.name] !== this.text) { - const err = new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) - return after(err) - } - return client.native.execute(this.name, values, after) - } - // plan the named query the first time, then execute it - return client.native.prepare(this.name, this.text, values.length, function (err) { - if (err) return after(err) - client.namedQueries[self.name] = self.text - return self.native.execute(self.name, values, after) - }) - } else if (this.values) { - if (!Array.isArray(this.values)) { - const err = new Error('Query values must be an array') - return after(err) - } - var vals = this.values.map(utils.prepareValue) - client.native.query(this.text, vals, after) - } else { - client.native.query(this.text, after) - } -} +/***/ }), + +/***/ 41359: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
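// Illustrative sketch only (not invoked anywhere in this bundle): a minimal
// duplex "echo" stream built on Node's core stream.Duplex, which the vendored
// duplex module below mirrors. Data written to the writable side is pushed
// back out of the readable side. The function name is purely illustrative.
function duplexEchoSketch() {
  var DuplexStream = require('stream').Duplex;

  var echo = new DuplexStream({
    read: function () {}, // no-op; data is pushed from the write handler below
    write: function (chunk, encoding, callback) {
      this.push(chunk); // whatever is written becomes readable output
      callback();
    },
  });

  echo.on('data', function (chunk) {
    console.log('echo:', chunk.toString()); // echo: ping
  });
  echo.write('ping');
  echo.end();
}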
+// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. -/***/ }), -/***/ 31283: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +/**/ +var pna = __nccwpck_require__(47810); +/**/ -const { EventEmitter } = __nccwpck_require__(28614) +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ -const Result = __nccwpck_require__(26736) -const utils = __nccwpck_require__(36419) +module.exports = Duplex; -class Query extends EventEmitter { - constructor(config, values, callback) { - super() +/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ - config = utils.normalizeQueryConfig(config, values, callback) +var Readable = __nccwpck_require__(51433); +var Writable = __nccwpck_require__(26993); - this.text = config.text - this.values = config.values - this.rows = config.rows - this.types = config.types - this.name = config.name - this.binary = config.binary - // use unique portal name each time - this.portal = config.portal || '' - this.callback = config.callback - this._rowMode = config.rowMode - if (process.domain && config.callback) { - this.callback = process.domain.bind(config.callback) - } - this._result = new Result(this._rowMode, this.types) +util.inherits(Duplex, Readable); - // potential for multiple results - this._results = this._result - this.isPreparedStatement = false - this._canceledDueToError = false - this._promise = null +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; } +} - requiresPreparation() { - // named queries must always be prepared - if (this.name) { - return true - } - // always prepare if there are max number of rows expected per - // portal execution - if (this.rows) { - return true - } - // don't prepare empty text queries - if (!this.text) { - return false - } - // prepare if there are values - if (!this.values) { - return false - } - return this.values.length > 0 - } +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); - _checkForMultirow() { - // if we already have a result with a command property - // then we've already executed one query in a multi-statement simple query - // turn our results into an array of results - if (this._result.command) { - if (!Array.isArray(this._results)) { - this._results = [this._result] - } - this._result = new Result(this._rowMode, this.types) - this._results.push(this._result) - } - } + Readable.call(this, options); + Writable.call(this, options); - // associates row metadata from the supplied - // message with this query object - // metadata used when parsing row results - handleRowDescription(msg) { - this._checkForMultirow() - this._result.addFields(msg.fields) - this._accumulateRows = this.callback || !this.listeners('row').length - } + if (options && options.readable === false) this.readable = false; - handleDataRow(msg) { - let row + if (options && options.writable === false) this.writable = false; - if (this._canceledDueToError) { - return - } + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) 
this.allowHalfOpen = false; - try { - row = this._result.parseRow(msg.fields) - } catch (err) { - this._canceledDueToError = err - return - } + this.once('end', onend); +} - this.emit('row', row, this._result) - if (this._accumulateRows) { - this._result.addRow(row) - } +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; } +}); - handleCommandComplete(msg, connection) { - this._checkForMultirow() - this._result.addCommandComplete(msg) - // need to sync after each command complete of a prepared statement - // if we were using a row count which results in multiple calls to _getRows - if (this.rows) { - connection.sync() - } - } +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; - // if a named prepared statement is created with empty query text - // the backend will send an emptyQuery message but *not* a command complete message - // since we pipeline sync immediately after execute we don't need to do anything here - // unless we have rows specified, in which case we did not pipeline the intial sync call - handleEmptyQuery(connection) { - if (this.rows) { - connection.sync() - } - } + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} - handleError(err, connection) { - // need to sync after error during a prepared statement - if (this._canceledDueToError) { - err = this._canceledDueToError - this._canceledDueToError = false - } - // if callback supplied do not emit error event as uncaught error - // events will bubble up to node process - if (this.callback) { - return this.callback(err) - } - this.emit('error', err) - } +function onEndNT(self) { + self.end(); +} - handleReadyForQuery(con) { - if (this._canceledDueToError) { - return this.handleError(this._canceledDueToError, con) +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; } - if (this.callback) { - this.callback(null, this._results) + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; } - this.emit('end', this._results) - } - submit(connection) { - if (typeof this.text !== 'string' && typeof this.name !== 'string') { - return new Error('A query must have either text or a name. 
Supplying neither is unsupported.') - } - const previous = connection.parsedStatements[this.name] - if (this.text && previous && this.text !== previous) { - return new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) - } - if (this.values && !Array.isArray(this.values)) { - return new Error('Query values must be an array') - } - if (this.requiresPreparation()) { - this.prepare(connection) - } else { - connection.query(this.text) - } - return null + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; } +}); - hasBeenParsed(connection) { - return this.name && connection.parsedStatements[this.name] - } +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); - handlePortalSuspended(connection) { - this._getRows(connection, this.rows) - } + pna.nextTick(cb, err); +}; - _getRows(connection, rows) { - connection.execute({ - portal: this.portal, - rows: rows, - }) - // if we're not reading pages of rows send the sync command - // to indicate the pipeline is finished - if (!rows) { - connection.sync() - } else { - // otherwise flush the call out to read more rows - connection.flush() - } - } +/***/ }), - // http://developer.postgresql.org/pgdocs/postgres/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY - prepare(connection) { - // prepared statements need sync to be called after each command - // complete or when an error is encountered - this.isPreparedStatement = true +/***/ 81542: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // TODO refactor this poor encapsulation - if (!this.hasBeenParsed(connection)) { - connection.parse({ - text: this.text, - name: this.name, - types: this.types, - }) - } +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - // because we're mapping user supplied values to - // postgres wire protocol compatible values it could - // throw an exception, so try/catch this section - try { - connection.bind({ - portal: this.portal, - statement: this.name, - values: this.values, - binary: this.binary, - valueMapper: utils.prepareValue, - }) - } catch (err) { - this.handleError(err, connection) - return - } +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. 
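// Illustrative sketch only (not invoked anywhere in this bundle): the same
// behaviour via Node's core stream.PassThrough, which the PassThrough defined
// below mirrors. Every chunk written to the writable side comes out unchanged
// on the readable side. The function name is purely illustrative.
function passThroughSketch() {
  var PassThroughStream = require('stream').PassThrough;

  var pass = new PassThroughStream();
  pass.on('data', function (chunk) {
    console.log('got:', chunk.toString()); // got: hello
  });
  pass.write('hello');
  pass.end();
}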
- connection.describe({ - type: 'P', - name: this.portal || '', - }) - this._getRows(connection, this.rows) - } - handleCopyInResponse(connection) { - connection.sendCopyFail('No source stream defined') - } +module.exports = PassThrough; - // eslint-disable-next-line no-unused-vars - handleCopyData(msg, connection) { - // noop - } -} +var Transform = __nccwpck_require__(34415); -module.exports = Query +/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; /***/ }), -/***/ 26736: +/***/ 51433: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
-var types = __nccwpck_require__(77929) -var matchRegexp = /^([A-Za-z]+)(?: (\d+))?(?: (\d+))?/ +/**/ -// result object returned from query -// in the 'end' event and also -// passed as second argument to provided callback -class Result { - constructor(rowMode, types) { - this.command = null - this.rowCount = null - this.oid = null - this.rows = [] - this.fields = [] - this._parsers = undefined - this._types = types - this.RowCtor = null - this.rowAsArray = rowMode === 'array' - if (this.rowAsArray) { - this.parseRow = this._parseRowAsArray - } - } +var pna = __nccwpck_require__(47810); +/**/ - // adds a command complete message - addCommandComplete(msg) { - var match - if (msg.text) { - // pure javascript - match = matchRegexp.exec(msg.text) - } else { - // native bindings - match = matchRegexp.exec(msg.command) - } - if (match) { - this.command = match[1] - if (match[3]) { - // COMMMAND OID ROWS - this.oid = parseInt(match[2], 10) - this.rowCount = parseInt(match[3], 10) - } else if (match[2]) { - // COMMAND ROWS - this.rowCount = parseInt(match[2], 10) - } - } - } +module.exports = Readable; - _parseRowAsArray(rowData) { - var row = new Array(rowData.length) - for (var i = 0, len = rowData.length; i < len; i++) { - var rawValue = rowData[i] - if (rawValue !== null) { - row[i] = this._parsers[i](rawValue) - } else { - row[i] = null - } - } - return row - } +/**/ +var isArray = __nccwpck_require__(20893); +/**/ - parseRow(rowData) { - var row = {} - for (var i = 0, len = rowData.length; i < len; i++) { - var rawValue = rowData[i] - var field = this.fields[i].name - if (rawValue !== null) { - row[field] = this._parsers[i](rawValue) - } else { - row[field] = null - } - } - return row - } +/**/ +var Duplex; +/**/ - addRow(row) { - this.rows.push(row) - } +Readable.ReadableState = ReadableState; - addFields(fieldDescriptions) { - // clears field definitions - // multiple query statements in 1 action can result in multiple sets - // of rowDescriptions...eg: 'select NOW(); select 1::int;' - // you need to reset the fields - this.fields = fieldDescriptions - if (this.fields.length) { - this._parsers = new Array(fieldDescriptions.length) - } - for (var i = 0; i < fieldDescriptions.length; i++) { - var desc = fieldDescriptions[i] - if (this._types) { - this._parsers[i] = this._types.getTypeParser(desc.dataTypeID, desc.format || 'text') - } else { - this._parsers[i] = types.getTypeParser(desc.dataTypeID, desc.format || 'text') - } - } - } -} +/**/ +var EE = __nccwpck_require__(28614).EventEmitter; -module.exports = Result +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ +/**/ +var Stream = __nccwpck_require__(62387); +/**/ -/***/ }), +/**/ -/***/ 19481: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var Buffer = __nccwpck_require__(21867).Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} -"use strict"; +/**/ -const crypto = __nccwpck_require__(76417) +/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ -function startSession(mechanisms) { - if (mechanisms.indexOf('SCRAM-SHA-256') === -1) { - throw new Error('SASL: Only mechanism SCRAM-SHA-256 is currently supported') - } +/**/ +var debugUtil = __nccwpck_require__(31669); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + 
debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ - const clientNonce = crypto.randomBytes(18).toString('base64') +var BufferList = __nccwpck_require__(27053); +var destroyImpl = __nccwpck_require__(97049); +var StringDecoder; - return { - mechanism: 'SCRAM-SHA-256', - clientNonce, - response: 'n,,n=*,r=' + clientNonce, - message: 'SASLInitialResponse', - } +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; } -function continueSession(session, password, serverData) { - if (session.message !== 'SASLInitialResponse') { - throw new Error('SASL: Last message was not SASLInitialResponse') - } - if (typeof password !== 'string') { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: client password must be a string') - } - if (typeof serverData !== 'string') { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: serverData must be a string') - } +function ReadableState(options, stream) { + Duplex = Duplex || __nccwpck_require__(41359); - const sv = parseServerFirstMessage(serverData) + options = options || {}; - if (!sv.nonce.startsWith(session.clientNonce)) { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce does not start with client nonce') - } else if (sv.nonce.length === session.clientNonce.length) { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce is too short') - } + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; - var saltBytes = Buffer.from(sv.salt, 'base64') + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; - var saltedPassword = Hi(password, saltBytes, sv.iteration) + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - var clientKey = hmacSha256(saltedPassword, 'Client Key') - var storedKey = sha256(clientKey) + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 
16 : 16 * 1024; - var clientFirstMessageBare = 'n=*,r=' + session.clientNonce - var serverFirstMessage = 'r=' + sv.nonce + ',s=' + sv.salt + ',i=' + sv.iteration + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; - var clientFinalMessageWithoutProof = 'c=biws,r=' + sv.nonce + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); - var authMessage = clientFirstMessageBare + ',' + serverFirstMessage + ',' + clientFinalMessageWithoutProof + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; - var clientSignature = hmacSha256(storedKey, authMessage) - var clientProofBytes = xorBuffers(clientKey, clientSignature) - var clientProof = clientProofBytes.toString('base64') + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; - var serverKey = hmacSha256(saltedPassword, 'Server Key') - var serverSignatureBytes = hmacSha256(serverKey, authMessage) + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; - session.message = 'SASLResponse' - session.serverSignature = serverSignatureBytes.toString('base64') - session.response = clientFinalMessageWithoutProof + ',p=' + clientProof -} + // has it been destroyed + this.destroyed = false; -function finalizeSession(session, serverData) { - if (session.message !== 'SASLResponse') { - throw new Error('SASL: Last message was not SASLResponse') - } - if (typeof serverData !== 'string') { - throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: serverData must be a string') - } + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; - const { serverSignature } = parseServerFinalMessage(serverData) + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; - if (serverSignature !== session.serverSignature) { - throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature does not match') - } -} + // if true, a maybeReadMore has been scheduled + this.readingMore = false; -/** - * printable = %x21-2B / %x2D-7E - * ;; Printable ASCII except ",". - * ;; Note that any "printable" is also - * ;; a valid "value". 
- */ -function isPrintableChars(text) { - if (typeof text !== 'string') { - throw new TypeError('SASL: text must be a string') + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = __nccwpck_require__(94841)/* .StringDecoder */ .s; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; } - return text - .split('') - .map((_, i) => text.charCodeAt(i)) - .every((c) => (c >= 0x21 && c <= 0x2b) || (c >= 0x2d && c <= 0x7e)) } -/** - * base64-char = ALPHA / DIGIT / "/" / "+" - * - * base64-4 = 4base64-char - * - * base64-3 = 3base64-char "=" - * - * base64-2 = 2base64-char "==" - * - * base64 = *base64-4 [base64-3 / base64-2] - */ -function isBase64(text) { - return /^(?:[a-zA-Z0-9+/]{4})*(?:[a-zA-Z0-9+/]{2}==|[a-zA-Z0-9+/]{3}=)?$/.test(text) -} +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(41359); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; -function parseAttributePairs(text) { - if (typeof text !== 'string') { - throw new TypeError('SASL: attribute pairs text must be a string') + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; } - return new Map( - text.split(',').map((attrValue) => { - if (!/^.=/.test(attrValue)) { - throw new Error('SASL: Invalid attribute pair entry') - } - const name = attrValue[0] - const value = attrValue.substring(2) - return [name, value] - }) - ) + Stream.call(this); } -function parseServerFirstMessage(data) { - const attrPairs = parseAttributePairs(data) +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } - const nonce = attrPairs.get('r') - if (!nonce) { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce missing') - } else if (!isPrintableChars(nonce)) { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce must only contain printable characters') - } - const salt = attrPairs.get('s') - if (!salt) { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt missing') - } else if (!isBase64(salt)) { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt must be base64') + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; } - const iterationText = attrPairs.get('i') - if (!iterationText) { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: iteration missing') - } else if (!/^[1-9][0-9]*$/.test(iterationText)) { - throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: invalid iteration count') +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
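// For illustration, a minimal producer sketch built on the push()/_read() contract
// described above (an assumed example with hypothetical names, not part of the
// bundled readable-stream code): each _read() call queues one chunk, push(null)
// signals end-of-stream, and a false return from push() means the internal buffer
// has reached highWaterMark, so production should pause until _read() is called again.
const { Readable: NodeReadable } = require('stream');
class Countdown extends NodeReadable {
  constructor(n, opts) {
    super(opts);
    this.n = n;
  }
  _read() {
    if (this.n > 0) this.push(String(this.n--)); // queue one chunk per _read() call
    else this.push(null);                        // no more data: end the stream
  }
}
// Usage sketch: new Countdown(3).pipe(process.stdout); // writes "321"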
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; } - const iteration = parseInt(iterationText, 10) - return { - nonce, - salt, - iteration, + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } } + + return needMoreData(state); } -function parseServerFinalMessage(serverData) { - const attrPairs = parseAttributePairs(serverData) - const serverSignature = attrPairs.get('v') - if (!serverSignature) { - throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature is missing') - } else if (!isBase64(serverSignature)) { - throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature must be base64') - } - return { - serverSignature, +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); } + maybeReadMore(stream, state); } -function xorBuffers(a, b) { - if (!Buffer.isBuffer(a)) { - throw new TypeError('first argument must be a Buffer') - } - if (!Buffer.isBuffer(b)) { - throw new TypeError('second argument must be a Buffer') - } - if (a.length !== b.length) { - throw new Error('Buffer lengths must match') - } - if (a.length === 0) { - throw new Error('Buffers cannot be empty') +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); } - return Buffer.from(a.map((_, i) => a[i] ^ b[i])) + return er; } -function sha256(text) { - return crypto.createHash('sha256').update(text).digest() +// if it's past the high water mark, we can push in some more. 
+// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); } -function hmacSha256(key, msg) { - return crypto.createHmac('sha256', key).update(msg).digest() -} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; -function Hi(password, saltBytes, iterations) { - var ui1 = hmacSha256(password, Buffer.concat([saltBytes, Buffer.from([0, 0, 0, 1])])) - var ui = ui1 - for (var i = 0; i < iterations - 1; i++) { - ui1 = hmacSha256(password, ui1) - ui = xorBuffers(ui, ui1) - } +// backwards compatibility. +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = __nccwpck_require__(94841)/* .StringDecoder */ .s; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; - return ui +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; } -module.exports = { - startSession, - continueSession, - finalizeSession, +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; } +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; -/***/ }), + if (n !== 0) state.emittedReadable = false; -/***/ 78873: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } -"use strict"; + n = howMuchToRead(n, state); + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } -var types = __nccwpck_require__(77929) + // All the actual chunk generation logic needs to be + // *below* the call to _read. 
The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. -function TypeOverrides(userTypes) { - this._types = userTypes || types - this.text = {} - this.binary = {} -} + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); -TypeOverrides.prototype.getOverrides = function (format) { - switch (format) { - case 'text': - return this.text - case 'binary': - return this.binary - default: - return {} + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); } -} -TypeOverrides.prototype.setTypeParser = function (oid, format, parseFn) { - if (typeof format === 'function') { - parseFn = format - format = 'text' + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); } - this.getOverrides(format)[oid] = parseFn -} - -TypeOverrides.prototype.getTypeParser = function (oid, format) { - format = format || 'text' - return this.getOverrides(format)[oid] || this._types.getTypeParser(oid, format) -} - -module.exports = TypeOverrides - - -/***/ }), - -/***/ 36419: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } -const crypto = __nccwpck_require__(76417) + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; -const defaults = __nccwpck_require__(26419) + // If we tried to read() past the EOF, then emit end on the next tick. 
+ if (nOrig !== n && state.ended) endReadable(this); + } -function escapeElement(elementRepresentation) { - var escaped = elementRepresentation.replace(/\\/g, '\\\\').replace(/"/g, '\\"') + if (ret !== null) this.emit('data', ret); - return '"' + escaped + '"' -} + return ret; +}; -// convert a JS array to a postgres array literal -// uses comma separator so won't work for types like box that use -// a different array separator. -function arrayString(val) { - var result = '{' - for (var i = 0; i < val.length; i++) { - if (i > 0) { - result = result + ',' - } - if (val[i] === null || typeof val[i] === 'undefined') { - result = result + 'NULL' - } else if (Array.isArray(val[i])) { - result = result + arrayString(val[i]) - } else if (val[i] instanceof Buffer) { - result += '\\\\x' + val[i].toString('hex') - } else { - result += escapeElement(prepareValue(val[i])) +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; } } - result = result + '}' - return result + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); } -// converts values from javascript types -// to their 'raw' counterparts for use as a postgres parameter -// note: you can override this function to provide your own conversion mechanism -// for complex types, etc... -var prepareValue = function (val, seen) { - // null and undefined are both null for postgres - if (val == null) { - return null - } - if (val instanceof Buffer) { - return val - } - if (ArrayBuffer.isView(val)) { - var buf = Buffer.from(val.buffer, val.byteOffset, val.byteLength) - if (buf.length === val.byteLength) { - return buf - } - return buf.slice(val.byteOffset, val.byteOffset + val.byteLength) // Node.js v4 does not support those Buffer.from params - } - if (val instanceof Date) { - if (defaults.parseInputDatesAsUTC) { - return dateToStringUTC(val) - } else { - return dateToString(val) - } - } - if (Array.isArray(val)) { - return arrayString(val) - } - if (typeof val === 'object') { - return prepareObject(val, seen) +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); } - return val.toString() } -function prepareObject(val, seen) { - if (val && typeof val.toPostgres === 'function') { - seen = seen || [] - if (seen.indexOf(val) !== -1) { - throw new Error('circular reference detected while preparing "' + val + '" for query') - } - seen.push(val) +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} - return prepareValue(val.toPostgres(prepareValue), seen) +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. 
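// A consumer-side sketch of the pull style that this preemptive-read logic supports
// (illustrative only; the helper name below is an assumption, not from the bundle):
// each 'readable' event is drained with read() until it returns null, while the
// stream refills its internal buffer up to highWaterMark in the background.
const { PassThrough: NodePassThrough } = require('stream');
function drainOnReadable(src, onChunk) {
  src.on('readable', () => {
    let chunk;
    while ((chunk = src.read()) !== null) onChunk(chunk); // pull until the buffer is empty
  });
}
// Usage sketch:
//   const pt = new NodePassThrough();
//   drainOnReadable(pt, (c) => process.stdout.write(c));
//   pt.end('pulled via read()\n');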
+function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); } - return JSON.stringify(val) } -function pad(number, digits) { - number = '' + number - while (number.length < digits) { - number = '0' + number +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; } - return number + state.readingMore = false; } -function dateToString(date) { - var offset = -date.getTimezoneOffset() +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; - var year = date.getFullYear() - var isBCYear = year < 1 - if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; - var ret = - pad(year, 4) + - '-' + - pad(date.getMonth() + 1, 2) + - '-' + - pad(date.getDate(), 2) + - 'T' + - pad(date.getHours(), 2) + - ':' + - pad(date.getMinutes(), 2) + - ':' + - pad(date.getSeconds(), 2) + - '.' + - pad(date.getMilliseconds(), 3) + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - if (offset < 0) { - ret += '-' - offset *= -1 - } else { - ret += '+' + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } } - ret += pad(Math.floor(offset / 60), 2) + ':' + pad(offset % 60, 2) - if (isBCYear) ret += ' BC' - return ret -} + function onend() { + debug('onend'); + dest.end(); + } -function dateToStringUTC(date) { - var year = date.getUTCFullYear() - var isBCYear = year < 1 - if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); - var ret = - pad(year, 4) + - '-' + - pad(date.getUTCMonth() + 1, 2) + - '-' + - pad(date.getUTCDate(), 2) + - 'T' + - pad(date.getUTCHours(), 2) + - ':' + - pad(date.getUTCMinutes(), 2) + - ':' + - pad(date.getUTCSeconds(), 2) + - '.' 
+ - pad(date.getUTCMilliseconds(), 3) + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); - ret += '+00:00' - if (isBCYear) ret += ' BC' - return ret -} + cleanedUp = true; -function normalizeQueryConfig(config, values, callback) { - // can take in strings or config objects - config = typeof config === 'string' ? { text: config } : config - if (values) { - if (typeof values === 'function') { - config.callback = values - } else { - config.values = values + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); } } - if (callback) { - config.callback = callback + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); } - return config -} -const md5 = function (string) { - return crypto.createHash('md5').update(string, 'utf-8').digest('hex') -} + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); -// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html -const postgresMd5PasswordHash = function (user, password, salt) { - var inner = md5(password + user) - var outer = md5(Buffer.concat([Buffer.from(inner), salt])) - return 'md5' + outer -} + // Both close and finish should trigger unpipe, but only once. 
+ function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); -module.exports = { - prepareValue: function prepareValueWrapper(value) { - // this ensures that extra arguments do not get passed into prepareValue - // by accident, eg: from calling values.map(utils.prepareValue) - return prepareValue(value) - }, - normalizeQueryConfig, - postgresMd5PasswordHash, - md5, -} + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + // tell the dest that it's being piped to + dest.emit('pipe', src); -/***/ }), + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } -/***/ 16926: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return dest; +}; -"use strict"; +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; -var path = __nccwpck_require__(85622) - , Stream = __nccwpck_require__(92413).Stream - , split = __nccwpck_require__(15000) - , util = __nccwpck_require__(31669) - , defaultPort = 5432 - , isWin = (process.platform === 'win32') - , warnStream = process.stderr -; + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; -var S_IRWXG = 56 // 00070(8) - , S_IRWXO = 7 // 00007(8) - , S_IFMT = 61440 // 00170000(8) - , S_IFREG = 32768 // 0100000(8) -; -function isRegFile(mode) { - return ((mode & S_IFMT) == S_IFREG); -} + if (!dest) dest = state.pipes; -var fieldNames = [ 'host', 'port', 'database', 'user', 'password' ]; -var nrOfFields = fieldNames.length; -var passKey = fieldNames[ nrOfFields -1 ]; + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + // slow case. multiple pipe destinations. -function warn() { - var isWritable = ( - warnStream instanceof Stream && - true === warnStream.writable - ); + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; - if (isWritable) { - var args = Array.prototype.slice.call(arguments).concat("\n"); - warnStream.write( util.format.apply(util, args) ); - } -} + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + // try to find the right one. 
+ var index = indexOf(state.pipes, dest); + if (index === -1) return this; -Object.defineProperty(module.exports, "isWin", ({ - get : function() { - return isWin; - } , - set : function(val) { - isWin = val; - } -})); + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); -module.exports.warnTo = function(stream) { - var old = warnStream; - warnStream = stream; - return old; + return this; }; -module.exports.getFileName = function(rawEnv){ - var env = rawEnv || process.env; - var file = env.PGPASSFILE || ( - isWin ? - path.join( env.APPDATA || './' , 'postgresql', 'pgpass.conf' ) : - path.join( env.HOME || './', '.pgpass' ) - ); - return file; -}; +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); -module.exports.usePgPass = function(stats, fname) { - if (Object.prototype.hasOwnProperty.call(process.env, 'PGPASSWORD')) { - return false; + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } } + } - if (isWin) { - return true; - } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; - fname = fname || ''; +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} - if (! isRegFile(stats.mode)) { - warn('WARNING: password file "%s" is not a plain file', fname); - return false; - } +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; - if (stats.mode & (S_IRWXG | S_IRWXO)) { - /* If password file is insecure, alert the user and ignore it. 
*/ - warn('WARNING: password file "%s" has group or world access; permissions should be u=rw (0600) or less', fname); - return false; - } +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} - return true; -}; +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} -var matcher = module.exports.match = function(connInfo, entry) { - return fieldNames.slice(0, -1).reduce(function(prev, field, idx){ - if (idx == 1) { - // the port - if ( Number( connInfo[field] || defaultPort ) === Number( entry[field] ) ) { - return prev && true; - } - } - return prev && ( - entry[field] === '*' || - entry[field] === connInfo[field] - ); - }, true); +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; }; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} -module.exports.getPassword = function(connInfo, stream, cb) { - var pass; - var lineStream = stream.pipe(split()); - - function onLine(line) { - var entry = parseLine(line); - if (entry && isValidEntry(entry) && matcher(connInfo, entry)) { - pass = entry[passKey]; - lineStream.end(); // -> calls onEnd(), but pass is set now - } - } +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; - var onEnd = function() { - stream.destroy(); - cb(pass); - }; + var state = this._readableState; + var paused = false; - var onErr = function(err) { - stream.destroy(); - warn('WARNING: error on reading file: %s', err); - cb(undefined); - }; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } - stream.on('error', onErr); - lineStream - .on('data', onLine) - .on('end', onEnd) - .on('error', onErr) - ; + _this.push(null); + }); -}; + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; -var parseLine = module.exports.parseLine = function(line) { - if (line.length < 11 || line.match(/^\s+#/)) { - return null; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); } + }); - var curChar = ''; - var prevChar = ''; - var fieldIdx = 0; - var startIdx = 0; - var endIdx = 0; - var obj = {}; - var isLastField = false; - var addToObj = function(idx, i0, i1) { - var field = line.substring(i0, i1); + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } - if (! 
Object.hasOwnProperty.call(process.env, 'PGPASS_NO_DEESCAPE')) { - field = field.replace(/\\([:\\])/g, '$1'); - } + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } - obj[ fieldNames[idx] ] = field; - }; + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; - for (var i = 0 ; i < line.length-1 ; i += 1) { - curChar = line.charAt(i+1); - prevChar = line.charAt(i); + return this; +}; - isLastField = (fieldIdx == nrOfFields-1); +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); - if (isLastField) { - addToObj(fieldIdx, startIdx); - break; - } +// exposed for testing purposes only. +Readable._fromList = fromList; - if (i >= 0 && curChar == ':' && prevChar !== '\\') { - addToObj(fieldIdx, startIdx, i+1); +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; - startIdx = i+2; - fieldIdx += 1; - } - } + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } - obj = ( Object.keys(obj).length === nrOfFields ) ? obj : null; + return ret; +} - return obj; -}; +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? 
str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} -var isValidEntry = module.exports.isValidEntry = function(entry){ - var rules = { - // host - 0 : function(x){ - return x.length > 0; - } , - // port - 1 : function(x){ - if (x === '*') { - return true; - } - x = Number(x); - return ( - isFinite(x) && - x > 0 && - x < 9007199254740992 && - Math.floor(x) === x - ); - } , - // database - 2 : function(x){ - return x.length > 0; - } , - // username - 3 : function(x){ - return x.length > 0; - } , - // password - 4 : function(x){ - return x.length > 0; - } - }; +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} - for (var idx = 0 ; idx < fieldNames.length ; idx += 1) { - var rule = rules[idx]; - var value = entry[ fieldNames[idx] ] || ''; +function endReadable(stream) { + var state = stream._readableState; - var res = rule(value); - if (!res) { - return false; - } - } + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); - return true; -}; + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} /***/ }), -/***/ 19713: +/***/ 34415: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. 
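// As a concrete illustration of the contract described above (an assumed example
// with hypothetical names, not part of the bundled module): a Transform that
// upper-cases each chunk only needs to implement _transform(chunk, encoding, cb);
// calling cb(null, data) both pushes the output and unblocks the write side.
const { Transform: NodeTransform } = require('stream');
class UpperCase extends NodeTransform {
  _transform(chunk, encoding, cb) {
    cb(null, chunk.toString().toUpperCase()); // one output chunk per input chunk
  }
}
// Usage sketch: process.stdin.pipe(new UpperCase()).pipe(process.stdout);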
-var path = __nccwpck_require__(85622) - , fs = __nccwpck_require__(35747) - , helper = __nccwpck_require__(16926) -; +module.exports = Transform; -module.exports = function(connInfo, cb) { - var file = helper.getFileName(); - - fs.stat(file, function(err, stat){ - if (err || !helper.usePgPass(stat, file)) { - return cb(undefined); - } +var Duplex = __nccwpck_require__(41359); - var st = fs.createReadStream(file); +/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ - helper.getPassword(connInfo, st, cb); - }); -}; +util.inherits(Transform, Duplex); -module.exports.warnTo = helper.warnTo; +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; -/***/ }), + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } -/***/ 34702: -/***/ ((__unused_webpack_module, exports) => { + ts.writechunk = null; + ts.writecb = null; -"use strict"; + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + cb(er); -exports.parse = function (source, transform) { - return new ArrayParser(source, transform).parse() + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } } -class ArrayParser { - constructor (source, transform) { - this.source = source - this.transform = transform || identity - this.position = 0 - this.entries = [] - this.recorded = [] - this.dimension = 0 - } +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); - isEof () { - return this.position >= this.source.length - } + Duplex.call(this, options); - nextCharacter () { - var character = this.source[this.position++] - if (character === '\\') { - return { - value: this.source[this.position++], - escaped: true - } - } - return { - value: character, - escaped: false - } - } + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; - record (character) { - this.recorded.push(character) - } + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; - newEntry (includeEmpty) { - var entry - if (this.recorded.length > 0 || includeEmpty) { - entry = this.recorded.join('') - if (entry === 'NULL' && !includeEmpty) { - entry = null - } - if (entry !== null) entry = this.transform(entry) - this.entries.push(entry) - this.recorded = [] - } - } + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. 
+ this._readableState.sync = false; - consumeDimensions () { - if (this.source[0] === '[') { - while (!this.isEof()) { - var char = this.nextCharacter() - if (char.value === '=') break - } - } - } + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; - parse (nested) { - var character, parser, quote - this.consumeDimensions() - while (!this.isEof()) { - character = this.nextCharacter() - if (character.value === '{' && !quote) { - this.dimension++ - if (this.dimension > 1) { - parser = new ArrayParser(this.source.substr(this.position - 1), this.transform) - this.entries.push(parser.parse(true)) - this.position += parser.position - 2 - } - } else if (character.value === '}' && !quote) { - this.dimension-- - if (!this.dimension) { - this.newEntry() - if (nested) return this.entries - } - } else if (character.value === '"' && !character.escaped) { - if (quote) this.newEntry(true) - quote = !quote - } else if (character.value === ',' && !quote) { - this.newEntry() - } else { - this.record(character.value) - } - } - if (this.dimension !== 0) { - throw new Error('array dimension not balanced') - } - return this.entries + if (typeof options.flush === 'function') this._flush = options.flush; } -} -function identity (value) { - return value + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); } +function prefinish() { + var _this = this; -/***/ }), - -/***/ 79377: -/***/ ((module) => { + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} -"use strict"; +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; -module.exports = function parseBytea (input) { - if (/^\\x/.test(input)) { - // new 'hex' style response (pg >9.0) - return new Buffer(input.substr(2), 'hex') +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); } - var output = '' - var i = 0 - while (i < input.length) { - if (input[i] !== '\\') { - output += input[i] - ++i - } else { - if (/[0-7]{3}/.test(input.substr(i + 1, 3))) { - output += String.fromCharCode(parseInt(input.substr(i + 1, 3), 8)) - i += 4 - } else { - var backslashes = 1 - while (i + backslashes < input.length && input[i + backslashes] === '\\') { - backslashes++ - } - for (var k = 0; k < Math.floor(backslashes / 2); ++k) { - output += '\\' - } - i += Math.floor(backslashes / 2) * 2 - } - } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. 
+// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; } - return new Buffer(output, 'binary') -} +}; +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; -/***/ }), + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; -/***/ 13264: -/***/ ((module) => { +function done(stream, er, data) { + if (er) return stream.emit('error', er); -"use strict"; + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); -var DATE_TIME = /(\d{1,})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?.*?( BC)?$/ -var DATE = /^(\d{1,})-(\d{2})-(\d{2})( BC)?$/ -var TIME_ZONE = /([Z+-])(\d{2})?:?(\d{2})?:?(\d{2})?/ -var INFINITY = /^-?infinity$/ + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); -module.exports = function parseDate (isoDate) { - if (INFINITY.test(isoDate)) { - // Capitalize to Infinity before passing to Number - return Number(isoDate.replace('i', 'I')) - } - var matches = DATE_TIME.exec(isoDate) + return stream.push(null); +} - if (!matches) { - // Force YYYY-MM-DD dates to be parsed as local time - return getDate(isoDate) || null - } +/***/ }), - var isBC = !!matches[8] - var year = parseInt(matches[1], 10) - if (isBC) { - year = bcYearToNegativeYear(year) - } +/***/ 26993: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var month = parseInt(matches[2], 10) - 1 - var day = matches[3] - var hour = parseInt(matches[4], 10) - var minute = parseInt(matches[5], 10) - var second = parseInt(matches[6], 10) +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - var ms = matches[7] - ms = ms ? 1000 * parseFloat(ms) : 0 +// A bit simpler than readable streams. 
+// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. - var date - var offset = timeZoneOffset(isoDate) - if (offset != null) { - date = new Date(Date.UTC(year, month, day, hour, minute, second, ms)) - // Account for years from 0 to 99 being interpreted as 1900-1999 - // by Date.UTC / the multi-argument form of the Date constructor - if (is0To99(year)) { - date.setUTCFullYear(year) - } - if (offset !== 0) { - date.setTime(date.getTime() - offset) - } - } else { - date = new Date(year, month, day, hour, minute, second, ms) +/**/ - if (is0To99(year)) { - date.setFullYear(year) - } - } +var pna = __nccwpck_require__(47810); +/**/ - return date +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; } -function getDate (isoDate) { - var matches = DATE.exec(isoDate) - if (!matches) { - return - } +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; - var year = parseInt(matches[1], 10) - var isBC = !!matches[4] - if (isBC) { - year = bcYearToNegativeYear(year) - } + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ - var month = parseInt(matches[2], 10) - 1 - var day = matches[3] - // YYYY-MM-DD will be parsed as local time - var date = new Date(year, month, day) +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ - if (is0To99(year)) { - date.setFullYear(year) - } +/**/ +var Duplex; +/**/ - return date -} +Writable.WritableState = WritableState; -// match timezones: -// Z (UTC) -// -05 -// +06:30 -function timeZoneOffset (isoDate) { - if (isoDate.endsWith('+00')) { - return 0 - } +/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ - var zone = TIME_ZONE.exec(isoDate.split(' ')[1]) - if (!zone) return - var type = zone[1] +/**/ +var internalUtil = { + deprecate: __nccwpck_require__(65278) +}; +/**/ - if (type === 'Z') { - return 0 - } - var sign = type === '-' ? -1 : 1 - var offset = parseInt(zone[2], 10) * 3600 + - parseInt(zone[3] || 0, 10) * 60 + - parseInt(zone[4] || 0, 10) +/**/ +var Stream = __nccwpck_require__(62387); +/**/ - return offset * sign * 1000 -} +/**/ -function bcYearToNegativeYear (year) { - // Account for numerical difference between representations of BC years - // See: https://github.com/bendrucker/postgres-date/issues/5 - return -(year - 1) +var Buffer = __nccwpck_require__(21867).Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); } - -function is0To99 (num) { - return num >= 0 && num < 100 +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } +/**/ -/***/ }), +var destroyImpl = __nccwpck_require__(97049); -/***/ 19991: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +util.inherits(Writable, Stream); -"use strict"; +function nop() {} +function WritableState(options, stream) { + Duplex = Duplex || __nccwpck_require__(41359); -var extend = __nccwpck_require__(98736) + options = options || {}; -module.exports = PostgresInterval + // Duplex streams are both readable and writable, but share + // the same options object. 
+ // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; -function PostgresInterval (raw) { - if (!(this instanceof PostgresInterval)) { - return new PostgresInterval(raw) - } - extend(this, parse(raw)) -} -var properties = ['seconds', 'minutes', 'hours', 'days', 'months', 'years'] -PostgresInterval.prototype.toPostgres = function () { - var filtered = properties.filter(this.hasOwnProperty, this) + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; - // In addition to `properties`, we need to account for fractions of seconds. - if (this.milliseconds && filtered.indexOf('seconds') < 0) { - filtered.push('seconds') - } + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - if (filtered.length === 0) return '0' - return filtered - .map(function (property) { - var value = this[property] || 0 + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; - // Account for fractional part of seconds, - // remove trailing zeroes. - if (property === 'seconds' && this.milliseconds) { - value = (value + this.milliseconds / 1000).toFixed(6).replace(/\.?0+$/, '') - } + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; - return value + ' ' + property - }, this) - .join(' ') -} + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); -var propertiesISOEquivalent = { - years: 'Y', - months: 'M', - days: 'D', - hours: 'H', - minutes: 'M', - seconds: 'S' -} -var dateProperties = ['years', 'months', 'days'] -var timeProperties = ['hours', 'minutes', 'seconds'] -// according to ISO 8601 -PostgresInterval.prototype.toISOString = PostgresInterval.prototype.toISO = function () { - var datePart = dateProperties - .map(buildProperty, this) - .join('') + // if _final has been called + this.finalCalled = false; - var timePart = timeProperties - .map(buildProperty, this) - .join('') + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; - return 'P' + datePart + 'T' + timePart + // has it been destroyed + this.destroyed = false; - function buildProperty (property) { - var value = this[property] || 0 + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; - // Account for fractional part of seconds, - // remove trailing zeroes. - if (property === 'seconds' && this.milliseconds) { - value = (value + this.milliseconds / 1000).toFixed(6).replace(/0+$/, '') - } + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; - return value + propertiesISOEquivalent[property] - } -} + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; -var NUMBER = '([+-]?\\d+)' -var YEAR = NUMBER + '\\s+years?' -var MONTH = NUMBER + '\\s+mons?' -var DAY = NUMBER + '\\s+days?' -var TIME = '([+-])?([\\d]*):(\\d\\d):(\\d\\d)\\.?(\\d{1,6})?' -var INTERVAL = new RegExp([YEAR, MONTH, DAY, TIME].map(function (regexString) { - return '(' + regexString + ')?' -}) - .join('\\s*')) + // a flag to see when we're in the middle of a write. + this.writing = false; -// Positions of values in regex match -var positions = { - years: 2, - months: 4, - days: 6, - hours: 9, - minutes: 10, - seconds: 11, - milliseconds: 12 -} -// We can use negative time -var negatives = ['hours', 'minutes', 'seconds', 'milliseconds'] + // when true all writes will be buffered until .uncork() call + this.corked = 0; -function parseMilliseconds (fraction) { - // add omitted zeroes - var microseconds = fraction + '000000'.slice(fraction.length) - return parseInt(microseconds, 10) / 1000 -} + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; -function parse (interval) { - if (!interval) return {} - var matches = INTERVAL.exec(interval) - var isNegative = matches[8] === '-' - return Object.keys(positions) - .reduce(function (parsed, property) { - var position = positions[property] - var value = matches[position] - // no empty string - if (!value) return parsed - // milliseconds are actually microseconds (up to 6 digits) - // with omitted trailing zeroes. - value = property === 'milliseconds' - ? parseMilliseconds(value) - : parseInt(value, 10) - // no zeros - if (!value) return parsed - if (isNegative && ~negatives.indexOf(property)) { - value *= -1 - } - parsed[property] = value - return parsed - }, {}) -} + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; -/***/ }), + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; -/***/ 47810: -/***/ ((module) => { + // the amount that is being written when _write is called. 
+ this.writelen = 0; -"use strict"; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; -if (typeof process === 'undefined' || - !process.version || - process.version.indexOf('v0.') === 0 || - process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { - module.exports = { nextTick: nextTick }; -} else { - module.exports = process + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); } -function nextTick(fn, arg1, arg2, arg3) { - if (typeof fn !== 'function') { - throw new TypeError('"callback" argument must be a function'); +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; } - var len = arguments.length; - var args, i; - switch (len) { - case 0: - case 1: - return process.nextTick(fn); - case 2: - return process.nextTick(function afterTickOne() { - fn.call(null, arg1); - }); - case 3: - return process.nextTick(function afterTickTwo() { - fn.call(null, arg1, arg2); - }); - case 4: - return process.nextTick(function afterTickThree() { - fn.call(null, arg1, arg2, arg3); + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') }); - default: - args = new Array(len - 1); - i = 0; - while (i < args.length) { - args[i++] = arguments[i]; + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; } - return process.nextTick(function afterTick() { - fn.apply(null, args); - }); - } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; } +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(41359); + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. -/***/ }), + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. 
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } -/***/ 29975: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + this._writableState = new WritableState(options, this); -"use strict"; -/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; -var Punycode = __nccwpck_require__(94213); + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } -var internals = {}; + Stream.call(this); +} +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; -// -// Read rules from file. -// -internals.rules = __nccwpck_require__(2156).map(function (rule) { +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} - return { - rule: rule, - suffix: rule.replace(/^(\*\.|\!)/, ''), - punySuffix: -1, - wildcard: rule.charAt(0) === '*', - exception: rule.charAt(0) === '!' - }; -}); +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} -// -// Check is given string ends with `suffix`. -// -internals.endsWith = function (str, suffix) { +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); - return str.indexOf(suffix, str.length - suffix.length) !== -1; -}; + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } -// -// Find rule for a given domain. -// -internals.findRule = function (domain) { + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - var punyDomain = Punycode.toASCII(domain); - return internals.rules.reduce(function (memo, rule) { + if (typeof cb !== 'function') cb = nop; - if (rule.punySuffix === -1){ - rule.punySuffix = Punycode.toASCII(rule.suffix); - } - if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { - return memo; - } - // This has been commented out as it never seems to run. This is because - // sub tlds always appear after their parents and we never find a shorter - // match. 
- //if (memo) { - // var memoSuffix = Punycode.toASCII(memo.suffix); - // if (memoSuffix.length >= punySuffix.length) { - // return memo; - // } - //} - return rule; - }, null); + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; }; +Writable.prototype.cork = function () { + var state = this._writableState; -// -// Error codes and messages. -// -exports.errorCodes = { - DOMAIN_TOO_SHORT: 'Domain name too short.', - DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', - LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', - LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', - LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', - LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', - LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' + state.corked++; }; +Writable.prototype.uncork = function () { + var state = this._writableState; -// -// Validate domain name and throw if not valid. -// -// From wikipedia: -// -// Hostnames are composed of series of labels concatenated with dots, as are all -// domain names. Each label must be between 1 and 63 characters long, and the -// entire hostname (including the delimiting dots) has a maximum of 255 chars. -// -// Allowed chars: -// -// * `a-z` -// * `0-9` -// * `-` but not as a starting or ending character -// * `.` as a separator for the textual portions of a domain name -// -// * http://en.wikipedia.org/wiki/Domain_name -// * http://en.wikipedia.org/wiki/Hostname -// -internals.validate = function (input) { - - // Before we can validate we need to take care of IDNs with unicode chars. - var ascii = Punycode.toASCII(input); + if (state.corked) { + state.corked--; - if (ascii.length < 1) { - return 'DOMAIN_TOO_SHORT'; - } - if (ascii.length > 255) { - return 'DOMAIN_TOO_LONG'; + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); } +}; - // Check each part's length and allowed chars. - var labels = ascii.split('.'); - var label; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; - for (var i = 0; i < labels.length; ++i) { - label = labels[i]; - if (!label.length) { - return 'LABEL_TOO_SHORT'; - } - if (label.length > 63) { - return 'LABEL_TOO_LONG'; - } - if (label.charAt(0) === '-') { - return 'LABEL_STARTS_WITH_DASH'; - } - if (label.charAt(label.length - 1) === '-') { - return 'LABEL_ENDS_WITH_DASH'; - } - if (!/^[a-z0-9\-]+$/.test(label)) { - return 'LABEL_INVALID_CHARS'; - } +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); } -}; + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); -// -// Public API -// +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; -// -// Parse domain. -// -exports.parse = function (input) { + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; - if (typeof input !== 'string') { - throw new TypeError('Domain name must be a string.'); + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); } - // Force domain to lowercase. - var domain = input.slice(0).toLowerCase(); + return ret; +} - // Handle FQDN. - // TODO: Simply remove trailing dot? - if (domain.charAt(domain.length - 1) === '.') { - domain = domain.slice(0, domain.length - 1); - } +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} - // Validate and sanitise input. 
- var error = internals.validate(domain); - if (error) { - return { - input: input, - error: { - message: exports.errorCodes[error], - code: error - } - }; +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); } +} - var parsed = { - input: input, - tld: null, - sld: null, - domain: null, - subdomain: null, - listed: false - }; +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} - var domainParts = domain.split('.'); +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; - // Non-Internet TLD - if (domainParts[domainParts.length - 1] === 'local') { - return parsed; - } + onwriteStateUpdate(state); - var handlePunycode = function () { + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); - if (!/xn--/.test(domain)) { - return parsed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); } - if (parsed.domain) { - parsed.domain = Punycode.toASCII(parsed.domain); + + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); } - if (parsed.subdomain) { - parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; } - return parsed; - }; + buffer.allBuffers = allBuffers; - var rule = internals.findRule(domain); + doWrite(stream, state, true, state.length, buffer, '', holder.finish); - // Unlisted tld. 
- if (!rule) { - if (domainParts.length < 2) { - return parsed; + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); } - parsed.tld = domainParts.pop(); - parsed.sld = domainParts.pop(); - parsed.domain = [parsed.sld, parsed.tld].join('.'); - if (domainParts.length) { - parsed.subdomain = domainParts.pop(); + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } } - return handlePunycode(); + + if (entry === null) state.lastBufferedRequest = null; } - // At this point we know the public suffix is listed. - parsed.listed = true; + state.bufferedRequest = entry; + state.bufferProcessing = false; +} - var tldParts = rule.suffix.split('.'); - var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; - if (rule.exception) { - privateParts.push(tldParts.shift()); +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; } - parsed.tld = tldParts.join('.'); + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - if (!privateParts.length) { - return handlePunycode(); + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); } - if (rule.wildcard) { - tldParts.unshift(privateParts.pop()); - parsed.tld = tldParts.join('.'); + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } } +} - if (!privateParts.length) { - return handlePunycode(); +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } } + return need; +} - parsed.sld = privateParts.pop(); - parsed.domain = [parsed.sld, parsed.tld].join('.'); +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} - if (privateParts.length) { - parsed.subdomain = privateParts.join('.'); +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; } +} - return handlePunycode(); +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); }; +/***/ }), + +/***/ 27053: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; -// -// Get domain. -// -exports.get = function (domain) { - if (!domain) { - return null; - } - return exports.parse(domain).domain || null; -}; +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +var Buffer = __nccwpck_require__(21867).Buffer; +var util = __nccwpck_require__(31669); -// -// Check whether domain belongs to a known public suffix. 
-// -exports.isValid = function (domain) { +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} - var parsed = exports.parse(domain); - return Boolean(parsed.domain && parsed.listed); -}; +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } -/***/ }), + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; -/***/ 74907: -/***/ ((module) => { + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; -"use strict"; + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; -var replace = String.prototype.replace; -var percentTwenties = /%20/g; + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; -module.exports = { - 'default': 'RFC3986', - formatters: { - RFC1738: function (value) { - return replace.call(value, percentTwenties, '+'); - }, - RFC3986: function (value) { - return value; - } - }, - RFC1738: 'RFC1738', - RFC3986: 'RFC3986' -}; + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} /***/ }), -/***/ 22760: +/***/ 97049: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var stringify = __nccwpck_require__(79954); -var parse = __nccwpck_require__(33912); -var formats = __nccwpck_require__(74907); - -module.exports = { - formats: formats, - parse: parse, - stringify: stringify -}; +/**/ +var pna = __nccwpck_require__(47810); +/**/ -/***/ }), +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; -/***/ 33912: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; -"use strict"; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks -var utils = __nccwpck_require__(72360); + if (this._readableState) { + this._readableState.destroyed = true; + } -var has = Object.prototype.hasOwnProperty; + // if this is a 
duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } -var defaults = { - allowDots: false, - allowPrototypes: false, - arrayLimit: 20, - decoder: utils.decode, - delimiter: '&', - depth: 5, - parameterLimit: 1000, - plainObjects: false, - strictNullHandling: false -}; + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); -var parseValues = function parseQueryStringValues(str, options) { - var obj = {}; - var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; - var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit; - var parts = cleanStr.split(options.delimiter, limit); + return this; +} - for (var i = 0; i < parts.length; ++i) { - var part = parts[i]; +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } - var bracketEqualsPos = part.indexOf(']='); - var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} - var key, val; - if (pos === -1) { - key = options.decoder(part, defaults.decoder); - val = options.strictNullHandling ? null : ''; - } else { - key = options.decoder(part.slice(0, pos), defaults.decoder); - val = options.decoder(part.slice(pos + 1), defaults.decoder); - } - if (has.call(obj, key)) { - obj[key] = [].concat(obj[key]).concat(val); - } else { - obj[key] = val; - } - } +function emitErrorNT(self, err) { + self.emit('error', err); +} - return obj; +module.exports = { + destroy: destroy, + undestroy: undestroy }; -var parseObject = function (chain, val, options) { - var leaf = val; +/***/ }), - for (var i = chain.length - 1; i >= 0; --i) { - var obj; - var root = chain[i]; +/***/ 62387: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (root === '[]') { - obj = []; - obj = obj.concat(leaf); - } else { - obj = options.plainObjects ? Object.create(null) : {}; - var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; - var index = parseInt(cleanRoot, 10); - if ( - !isNaN(index) - && root !== cleanRoot - && String(index) === cleanRoot - && index >= 0 - && (options.parseArrays && index <= options.arrayLimit) - ) { - obj = []; - obj[index] = leaf; - } else { - obj[cleanRoot] = leaf; - } - } +module.exports = __nccwpck_require__(92413); - leaf = obj; - } - return leaf; -}; +/***/ }), -var parseKeys = function parseQueryStringKeys(givenKey, val, options) { - if (!givenKey) { - return; - } +/***/ 51642: +/***/ ((module, exports, __nccwpck_require__) => { - // Transform dot notation to bracket notation - var key = options.allowDots ? 
givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; +var Stream = __nccwpck_require__(92413); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(51433); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(26993); + exports.Duplex = __nccwpck_require__(41359); + exports.Transform = __nccwpck_require__(34415); + exports.PassThrough = __nccwpck_require__(81542); +} - // The regex chunks - var brackets = /(\[[^[\]]*])/; - var child = /(\[[^[\]]*])/g; +/***/ }), - // Get the parent +/***/ 29977: +/***/ (() => { - var segment = brackets.exec(key); - var parent = segment ? key.slice(0, segment.index) : key; +/*! ***************************************************************************** +Copyright (C) Microsoft. All rights reserved. +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 - // Stash the parent if it exists +THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. - var keys = []; - if (parent) { - // If we aren't using plain objects, optionally prefix keys - // that would overwrite object prototype properties - if (!options.plainObjects && has.call(Object.prototype, parent)) { - if (!options.allowPrototypes) { - return; +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. +***************************************************************************** */ +var Reflect; +(function (Reflect) { + // Metadata Proposal + // https://rbuckton.github.io/reflect-metadata/ + (function (factory) { + var root = typeof global === "object" ? global : + typeof self === "object" ? self : + typeof this === "object" ? this : + Function("return this;")(); + var exporter = makeExporter(Reflect); + if (typeof root.Reflect === "undefined") { + root.Reflect = Reflect; + } + else { + exporter = makeExporter(root.Reflect, exporter); + } + factory(exporter); + function makeExporter(target, previous) { + return function (key, value) { + if (typeof target[key] !== "function") { + Object.defineProperty(target, key, { configurable: true, writable: true, value: value }); + } + if (previous) + previous(key, value); + }; + } + })(function (exporter) { + var hasOwn = Object.prototype.hasOwnProperty; + // feature test for Symbol support + var supportsSymbol = typeof Symbol === "function"; + var toPrimitiveSymbol = supportsSymbol && typeof Symbol.toPrimitive !== "undefined" ? Symbol.toPrimitive : "@@toPrimitive"; + var iteratorSymbol = supportsSymbol && typeof Symbol.iterator !== "undefined" ? 
Symbol.iterator : "@@iterator"; + var supportsCreate = typeof Object.create === "function"; // feature test for Object.create support + var supportsProto = { __proto__: [] } instanceof Array; // feature test for __proto__ support + var downLevel = !supportsCreate && !supportsProto; + var HashMap = { + // create an object in dictionary mode (a.k.a. "slow" mode in v8) + create: supportsCreate + ? function () { return MakeDictionary(Object.create(null)); } + : supportsProto + ? function () { return MakeDictionary({ __proto__: null }); } + : function () { return MakeDictionary({}); }, + has: downLevel + ? function (map, key) { return hasOwn.call(map, key); } + : function (map, key) { return key in map; }, + get: downLevel + ? function (map, key) { return hasOwn.call(map, key) ? map[key] : undefined; } + : function (map, key) { return map[key]; }, + }; + // Load global or shim versions of Map, Set, and WeakMap + var functionPrototype = Object.getPrototypeOf(Function); + var usePolyfill = typeof process === "object" && process.env && process.env["REFLECT_METADATA_USE_MAP_POLYFILL"] === "true"; + var _Map = !usePolyfill && typeof Map === "function" && typeof Map.prototype.entries === "function" ? Map : CreateMapPolyfill(); + var _Set = !usePolyfill && typeof Set === "function" && typeof Set.prototype.entries === "function" ? Set : CreateSetPolyfill(); + var _WeakMap = !usePolyfill && typeof WeakMap === "function" ? WeakMap : CreateWeakMapPolyfill(); + // [[Metadata]] internal slot + // https://rbuckton.github.io/reflect-metadata/#ordinary-object-internal-methods-and-internal-slots + var Metadata = new _WeakMap(); + /** + * Applies a set of decorators to a property of a target object. + * @param decorators An array of decorators. + * @param target The target object. + * @param propertyKey (Optional) The property key to decorate. + * @param attributes (Optional) The property descriptor for the target key. + * @remarks Decorators are applied in reverse order. 
+ * @example + * + * class Example { + * // property declarations are not part of ES6, though they are valid in TypeScript: + * // static staticProperty; + * // property; + * + * constructor(p) { } + * static staticMethod(p) { } + * method(p) { } + * } + * + * // constructor + * Example = Reflect.decorate(decoratorsArray, Example); + * + * // property (on constructor) + * Reflect.decorate(decoratorsArray, Example, "staticProperty"); + * + * // property (on prototype) + * Reflect.decorate(decoratorsArray, Example.prototype, "property"); + * + * // method (on constructor) + * Object.defineProperty(Example, "staticMethod", + * Reflect.decorate(decoratorsArray, Example, "staticMethod", + * Object.getOwnPropertyDescriptor(Example, "staticMethod"))); + * + * // method (on prototype) + * Object.defineProperty(Example.prototype, "method", + * Reflect.decorate(decoratorsArray, Example.prototype, "method", + * Object.getOwnPropertyDescriptor(Example.prototype, "method"))); + * + */ + function decorate(decorators, target, propertyKey, attributes) { + if (!IsUndefined(propertyKey)) { + if (!IsArray(decorators)) + throw new TypeError(); + if (!IsObject(target)) + throw new TypeError(); + if (!IsObject(attributes) && !IsUndefined(attributes) && !IsNull(attributes)) + throw new TypeError(); + if (IsNull(attributes)) + attributes = undefined; + propertyKey = ToPropertyKey(propertyKey); + return DecorateProperty(decorators, target, propertyKey, attributes); + } + else { + if (!IsArray(decorators)) + throw new TypeError(); + if (!IsConstructor(target)) + throw new TypeError(); + return DecorateConstructor(decorators, target); + } + } + exporter("decorate", decorate); + // 4.1.2 Reflect.metadata(metadataKey, metadataValue) + // https://rbuckton.github.io/reflect-metadata/#reflect.metadata + /** + * A default metadata decorator factory that can be used on a class, class member, or parameter. + * @param metadataKey The key for the metadata entry. + * @param metadataValue The value for the metadata entry. + * @returns A decorator function. + * @remarks + * If `metadataKey` is already defined for the target and target key, the + * metadataValue for that key will be overwritten. + * @example + * + * // constructor + * @Reflect.metadata(key, value) + * class Example { + * } + * + * // property (on constructor, TypeScript only) + * class Example { + * @Reflect.metadata(key, value) + * static staticProperty; + * } + * + * // property (on prototype, TypeScript only) + * class Example { + * @Reflect.metadata(key, value) + * property; + * } + * + * // method (on constructor) + * class Example { + * @Reflect.metadata(key, value) + * static staticMethod() { } + * } + * + * // method (on prototype) + * class Example { + * @Reflect.metadata(key, value) + * method() { } + * } + * + */ + function metadata(metadataKey, metadataValue) { + function decorator(target, propertyKey) { + if (!IsObject(target)) + throw new TypeError(); + if (!IsUndefined(propertyKey) && !IsPropertyKey(propertyKey)) + throw new TypeError(); + OrdinaryDefineOwnMetadata(metadataKey, metadataValue, target, propertyKey); + } + return decorator; + } + exporter("metadata", metadata); + /** + * Define a unique metadata entry on the target. + * @param metadataKey A key used to store and retrieve metadata. + * @param metadataValue A value that contains attached metadata. + * @param target The target object on which to define metadata. + * @param propertyKey (Optional) The property key for the target. 
+ * @example + * + * class Example { + * // property declarations are not part of ES6, though they are valid in TypeScript: + * // static staticProperty; + * // property; + * + * constructor(p) { } + * static staticMethod(p) { } + * method(p) { } + * } + * + * // constructor + * Reflect.defineMetadata("custom:annotation", options, Example); + * + * // property (on constructor) + * Reflect.defineMetadata("custom:annotation", options, Example, "staticProperty"); + * + * // property (on prototype) + * Reflect.defineMetadata("custom:annotation", options, Example.prototype, "property"); + * + * // method (on constructor) + * Reflect.defineMetadata("custom:annotation", options, Example, "staticMethod"); + * + * // method (on prototype) + * Reflect.defineMetadata("custom:annotation", options, Example.prototype, "method"); + * + * // decorator factory as metadata-producing annotation. + * function MyAnnotation(options): Decorator { + * return (target, key?) => Reflect.defineMetadata("custom:annotation", options, target, key); + * } + * + */ + function defineMetadata(metadataKey, metadataValue, target, propertyKey) { + if (!IsObject(target)) + throw new TypeError(); + if (!IsUndefined(propertyKey)) + propertyKey = ToPropertyKey(propertyKey); + return OrdinaryDefineOwnMetadata(metadataKey, metadataValue, target, propertyKey); + } + exporter("defineMetadata", defineMetadata); + /** + * Gets a value indicating whether the target object or its prototype chain has the provided metadata key defined. + * @param metadataKey A key used to store and retrieve metadata. + * @param target The target object on which the metadata is defined. + * @param propertyKey (Optional) The property key for the target. + * @returns `true` if the metadata key was defined on the target object or its prototype chain; otherwise, `false`. + * @example + * + * class Example { + * // property declarations are not part of ES6, though they are valid in TypeScript: + * // static staticProperty; + * // property; + * + * constructor(p) { } + * static staticMethod(p) { } + * method(p) { } + * } + * + * // constructor + * result = Reflect.hasMetadata("custom:annotation", Example); + * + * // property (on constructor) + * result = Reflect.hasMetadata("custom:annotation", Example, "staticProperty"); + * + * // property (on prototype) + * result = Reflect.hasMetadata("custom:annotation", Example.prototype, "property"); + * + * // method (on constructor) + * result = Reflect.hasMetadata("custom:annotation", Example, "staticMethod"); + * + * // method (on prototype) + * result = Reflect.hasMetadata("custom:annotation", Example.prototype, "method"); + * + */ + function hasMetadata(metadataKey, target, propertyKey) { + if (!IsObject(target)) + throw new TypeError(); + if (!IsUndefined(propertyKey)) + propertyKey = ToPropertyKey(propertyKey); + return OrdinaryHasMetadata(metadataKey, target, propertyKey); + } + exporter("hasMetadata", hasMetadata); + /** + * Gets a value indicating whether the target object has the provided metadata key defined. + * @param metadataKey A key used to store and retrieve metadata. + * @param target The target object on which the metadata is defined. + * @param propertyKey (Optional) The property key for the target. + * @returns `true` if the metadata key was defined on the target object; otherwise, `false`. 
+ * @example + * + * class Example { + * // property declarations are not part of ES6, though they are valid in TypeScript: + * // static staticProperty; + * // property; + * + * constructor(p) { } + * static staticMethod(p) { } + * method(p) { } + * } + * + * // constructor + * result = Reflect.hasOwnMetadata("custom:annotation", Example); + * + * // property (on constructor) + * result = Reflect.hasOwnMetadata("custom:annotation", Example, "staticProperty"); + * + * // property (on prototype) + * result = Reflect.hasOwnMetadata("custom:annotation", Example.prototype, "property"); + * + * // method (on constructor) + * result = Reflect.hasOwnMetadata("custom:annotation", Example, "staticMethod"); + * + * // method (on prototype) + * result = Reflect.hasOwnMetadata("custom:annotation", Example.prototype, "method"); + * + */ + function hasOwnMetadata(metadataKey, target, propertyKey) { + if (!IsObject(target)) + throw new TypeError(); + if (!IsUndefined(propertyKey)) + propertyKey = ToPropertyKey(propertyKey); + return OrdinaryHasOwnMetadata(metadataKey, target, propertyKey); + } + exporter("hasOwnMetadata", hasOwnMetadata); + /** + * Gets the metadata value for the provided metadata key on the target object or its prototype chain. + * @param metadataKey A key used to store and retrieve metadata. + * @param target The target object on which the metadata is defined. + * @param propertyKey (Optional) The property key for the target. + * @returns The metadata value for the metadata key if found; otherwise, `undefined`. + * @example + * + * class Example { + * // property declarations are not part of ES6, though they are valid in TypeScript: + * // static staticProperty; + * // property; + * + * constructor(p) { } + * static staticMethod(p) { } + * method(p) { } + * } + * + * // constructor + * result = Reflect.getMetadata("custom:annotation", Example); + * + * // property (on constructor) + * result = Reflect.getMetadata("custom:annotation", Example, "staticProperty"); + * + * // property (on prototype) + * result = Reflect.getMetadata("custom:annotation", Example.prototype, "property"); + * + * // method (on constructor) + * result = Reflect.getMetadata("custom:annotation", Example, "staticMethod"); + * + * // method (on prototype) + * result = Reflect.getMetadata("custom:annotation", Example.prototype, "method"); + * + */ + function getMetadata(metadataKey, target, propertyKey) { + if (!IsObject(target)) + throw new TypeError(); + if (!IsUndefined(propertyKey)) + propertyKey = ToPropertyKey(propertyKey); + return OrdinaryGetMetadata(metadataKey, target, propertyKey); + } + exporter("getMetadata", getMetadata); + /** + * Gets the metadata value for the provided metadata key on the target object. + * @param metadataKey A key used to store and retrieve metadata. + * @param target The target object on which the metadata is defined. + * @param propertyKey (Optional) The property key for the target. + * @returns The metadata value for the metadata key if found; otherwise, `undefined`. 
+ * @example + * + * class Example { + * // property declarations are not part of ES6, though they are valid in TypeScript: + * // static staticProperty; + * // property; + * + * constructor(p) { } + * static staticMethod(p) { } + * method(p) { } + * } + * + * // constructor + * result = Reflect.getOwnMetadata("custom:annotation", Example); + * + * // property (on constructor) + * result = Reflect.getOwnMetadata("custom:annotation", Example, "staticProperty"); + * + * // property (on prototype) + * result = Reflect.getOwnMetadata("custom:annotation", Example.prototype, "property"); + * + * // method (on constructor) + * result = Reflect.getOwnMetadata("custom:annotation", Example, "staticMethod"); + * + * // method (on prototype) + * result = Reflect.getOwnMetadata("custom:annotation", Example.prototype, "method"); + * + */ + function getOwnMetadata(metadataKey, target, propertyKey) { + if (!IsObject(target)) + throw new TypeError(); + if (!IsUndefined(propertyKey)) + propertyKey = ToPropertyKey(propertyKey); + return OrdinaryGetOwnMetadata(metadataKey, target, propertyKey); + } + exporter("getOwnMetadata", getOwnMetadata); + /** + * Gets the metadata keys defined on the target object or its prototype chain. + * @param target The target object on which the metadata is defined. + * @param propertyKey (Optional) The property key for the target. + * @returns An array of unique metadata keys. + * @example + * + * class Example { + * // property declarations are not part of ES6, though they are valid in TypeScript: + * // static staticProperty; + * // property; + * + * constructor(p) { } + * static staticMethod(p) { } + * method(p) { } + * } + * + * // constructor + * result = Reflect.getMetadataKeys(Example); + * + * // property (on constructor) + * result = Reflect.getMetadataKeys(Example, "staticProperty"); + * + * // property (on prototype) + * result = Reflect.getMetadataKeys(Example.prototype, "property"); + * + * // method (on constructor) + * result = Reflect.getMetadataKeys(Example, "staticMethod"); + * + * // method (on prototype) + * result = Reflect.getMetadataKeys(Example.prototype, "method"); + * + */ + function getMetadataKeys(target, propertyKey) { + if (!IsObject(target)) + throw new TypeError(); + if (!IsUndefined(propertyKey)) + propertyKey = ToPropertyKey(propertyKey); + return OrdinaryMetadataKeys(target, propertyKey); + } + exporter("getMetadataKeys", getMetadataKeys); + /** + * Gets the unique metadata keys defined on the target object. + * @param target The target object on which the metadata is defined. + * @param propertyKey (Optional) The property key for the target. + * @returns An array of unique metadata keys. 
+ * @example + * + * class Example { + * // property declarations are not part of ES6, though they are valid in TypeScript: + * // static staticProperty; + * // property; + * + * constructor(p) { } + * static staticMethod(p) { } + * method(p) { } + * } + * + * // constructor + * result = Reflect.getOwnMetadataKeys(Example); + * + * // property (on constructor) + * result = Reflect.getOwnMetadataKeys(Example, "staticProperty"); + * + * // property (on prototype) + * result = Reflect.getOwnMetadataKeys(Example.prototype, "property"); + * + * // method (on constructor) + * result = Reflect.getOwnMetadataKeys(Example, "staticMethod"); + * + * // method (on prototype) + * result = Reflect.getOwnMetadataKeys(Example.prototype, "method"); + * + */ + function getOwnMetadataKeys(target, propertyKey) { + if (!IsObject(target)) + throw new TypeError(); + if (!IsUndefined(propertyKey)) + propertyKey = ToPropertyKey(propertyKey); + return OrdinaryOwnMetadataKeys(target, propertyKey); + } + exporter("getOwnMetadataKeys", getOwnMetadataKeys); + /** + * Deletes the metadata entry from the target object with the provided key. + * @param metadataKey A key used to store and retrieve metadata. + * @param target The target object on which the metadata is defined. + * @param propertyKey (Optional) The property key for the target. + * @returns `true` if the metadata entry was found and deleted; otherwise, false. + * @example + * + * class Example { + * // property declarations are not part of ES6, though they are valid in TypeScript: + * // static staticProperty; + * // property; + * + * constructor(p) { } + * static staticMethod(p) { } + * method(p) { } + * } + * + * // constructor + * result = Reflect.deleteMetadata("custom:annotation", Example); + * + * // property (on constructor) + * result = Reflect.deleteMetadata("custom:annotation", Example, "staticProperty"); + * + * // property (on prototype) + * result = Reflect.deleteMetadata("custom:annotation", Example.prototype, "property"); + * + * // method (on constructor) + * result = Reflect.deleteMetadata("custom:annotation", Example, "staticMethod"); + * + * // method (on prototype) + * result = Reflect.deleteMetadata("custom:annotation", Example.prototype, "method"); + * + */ + function deleteMetadata(metadataKey, target, propertyKey) { + if (!IsObject(target)) + throw new TypeError(); + if (!IsUndefined(propertyKey)) + propertyKey = ToPropertyKey(propertyKey); + var metadataMap = GetOrCreateMetadataMap(target, propertyKey, /*Create*/ false); + if (IsUndefined(metadataMap)) + return false; + if (!metadataMap.delete(metadataKey)) + return false; + if (metadataMap.size > 0) + return true; + var targetMetadata = Metadata.get(target); + targetMetadata.delete(propertyKey); + if (targetMetadata.size > 0) + return true; + Metadata.delete(target); + return true; + } + exporter("deleteMetadata", deleteMetadata); + function DecorateConstructor(decorators, target) { + for (var i = decorators.length - 1; i >= 0; --i) { + var decorator = decorators[i]; + var decorated = decorator(target); + if (!IsUndefined(decorated) && !IsNull(decorated)) { + if (!IsConstructor(decorated)) + throw new TypeError(); + target = decorated; + } + } + return target; + } + function DecorateProperty(decorators, target, propertyKey, descriptor) { + for (var i = decorators.length - 1; i >= 0; --i) { + var decorator = decorators[i]; + var decorated = decorator(target, propertyKey, descriptor); + if (!IsUndefined(decorated) && !IsNull(decorated)) { + if (!IsObject(decorated)) + throw new 
TypeError(); + descriptor = decorated; + } + } + return descriptor; + } + function GetOrCreateMetadataMap(O, P, Create) { + var targetMetadata = Metadata.get(O); + if (IsUndefined(targetMetadata)) { + if (!Create) + return undefined; + targetMetadata = new _Map(); + Metadata.set(O, targetMetadata); + } + var metadataMap = targetMetadata.get(P); + if (IsUndefined(metadataMap)) { + if (!Create) + return undefined; + metadataMap = new _Map(); + targetMetadata.set(P, metadataMap); + } + return metadataMap; + } + // 3.1.1.1 OrdinaryHasMetadata(MetadataKey, O, P) + // https://rbuckton.github.io/reflect-metadata/#ordinaryhasmetadata + function OrdinaryHasMetadata(MetadataKey, O, P) { + var hasOwn = OrdinaryHasOwnMetadata(MetadataKey, O, P); + if (hasOwn) + return true; + var parent = OrdinaryGetPrototypeOf(O); + if (!IsNull(parent)) + return OrdinaryHasMetadata(MetadataKey, parent, P); + return false; + } + // 3.1.2.1 OrdinaryHasOwnMetadata(MetadataKey, O, P) + // https://rbuckton.github.io/reflect-metadata/#ordinaryhasownmetadata + function OrdinaryHasOwnMetadata(MetadataKey, O, P) { + var metadataMap = GetOrCreateMetadataMap(O, P, /*Create*/ false); + if (IsUndefined(metadataMap)) + return false; + return ToBoolean(metadataMap.has(MetadataKey)); + } + // 3.1.3.1 OrdinaryGetMetadata(MetadataKey, O, P) + // https://rbuckton.github.io/reflect-metadata/#ordinarygetmetadata + function OrdinaryGetMetadata(MetadataKey, O, P) { + var hasOwn = OrdinaryHasOwnMetadata(MetadataKey, O, P); + if (hasOwn) + return OrdinaryGetOwnMetadata(MetadataKey, O, P); + var parent = OrdinaryGetPrototypeOf(O); + if (!IsNull(parent)) + return OrdinaryGetMetadata(MetadataKey, parent, P); + return undefined; + } + // 3.1.4.1 OrdinaryGetOwnMetadata(MetadataKey, O, P) + // https://rbuckton.github.io/reflect-metadata/#ordinarygetownmetadata + function OrdinaryGetOwnMetadata(MetadataKey, O, P) { + var metadataMap = GetOrCreateMetadataMap(O, P, /*Create*/ false); + if (IsUndefined(metadataMap)) + return undefined; + return metadataMap.get(MetadataKey); + } + // 3.1.5.1 OrdinaryDefineOwnMetadata(MetadataKey, MetadataValue, O, P) + // https://rbuckton.github.io/reflect-metadata/#ordinarydefineownmetadata + function OrdinaryDefineOwnMetadata(MetadataKey, MetadataValue, O, P) { + var metadataMap = GetOrCreateMetadataMap(O, P, /*Create*/ true); + metadataMap.set(MetadataKey, MetadataValue); + } + // 3.1.6.1 OrdinaryMetadataKeys(O, P) + // https://rbuckton.github.io/reflect-metadata/#ordinarymetadatakeys + function OrdinaryMetadataKeys(O, P) { + var ownKeys = OrdinaryOwnMetadataKeys(O, P); + var parent = OrdinaryGetPrototypeOf(O); + if (parent === null) + return ownKeys; + var parentKeys = OrdinaryMetadataKeys(parent, P); + if (parentKeys.length <= 0) + return ownKeys; + if (ownKeys.length <= 0) + return parentKeys; + var set = new _Set(); + var keys = []; + for (var _i = 0, ownKeys_1 = ownKeys; _i < ownKeys_1.length; _i++) { + var key = ownKeys_1[_i]; + var hasKey = set.has(key); + if (!hasKey) { + set.add(key); + keys.push(key); + } + } + for (var _a = 0, parentKeys_1 = parentKeys; _a < parentKeys_1.length; _a++) { + var key = parentKeys_1[_a]; + var hasKey = set.has(key); + if (!hasKey) { + set.add(key); + keys.push(key); + } + } + return keys; + } + // 3.1.7.1 OrdinaryOwnMetadataKeys(O, P) + // https://rbuckton.github.io/reflect-metadata/#ordinaryownmetadatakeys + function OrdinaryOwnMetadataKeys(O, P) { + var keys = []; + var metadataMap = GetOrCreateMetadataMap(O, P, /*Create*/ false); + if (IsUndefined(metadataMap)) + return 
keys; + var keysObj = metadataMap.keys(); + var iterator = GetIterator(keysObj); + var k = 0; + while (true) { + var next = IteratorStep(iterator); + if (!next) { + keys.length = k; + return keys; + } + var nextValue = IteratorValue(next); + try { + keys[k] = nextValue; + } + catch (e) { + try { + IteratorClose(iterator); + } + finally { + throw e; + } + } + k++; + } + } + // 6 ECMAScript Data Typ0es and Values + // https://tc39.github.io/ecma262/#sec-ecmascript-data-types-and-values + function Type(x) { + if (x === null) + return 1 /* Null */; + switch (typeof x) { + case "undefined": return 0 /* Undefined */; + case "boolean": return 2 /* Boolean */; + case "string": return 3 /* String */; + case "symbol": return 4 /* Symbol */; + case "number": return 5 /* Number */; + case "object": return x === null ? 1 /* Null */ : 6 /* Object */; + default: return 6 /* Object */; + } + } + // 6.1.1 The Undefined Type + // https://tc39.github.io/ecma262/#sec-ecmascript-language-types-undefined-type + function IsUndefined(x) { + return x === undefined; + } + // 6.1.2 The Null Type + // https://tc39.github.io/ecma262/#sec-ecmascript-language-types-null-type + function IsNull(x) { + return x === null; + } + // 6.1.5 The Symbol Type + // https://tc39.github.io/ecma262/#sec-ecmascript-language-types-symbol-type + function IsSymbol(x) { + return typeof x === "symbol"; + } + // 6.1.7 The Object Type + // https://tc39.github.io/ecma262/#sec-object-type + function IsObject(x) { + return typeof x === "object" ? x !== null : typeof x === "function"; + } + // 7.1 Type Conversion + // https://tc39.github.io/ecma262/#sec-type-conversion + // 7.1.1 ToPrimitive(input [, PreferredType]) + // https://tc39.github.io/ecma262/#sec-toprimitive + function ToPrimitive(input, PreferredType) { + switch (Type(input)) { + case 0 /* Undefined */: return input; + case 1 /* Null */: return input; + case 2 /* Boolean */: return input; + case 3 /* String */: return input; + case 4 /* Symbol */: return input; + case 5 /* Number */: return input; + } + var hint = PreferredType === 3 /* String */ ? "string" : PreferredType === 5 /* Number */ ? "number" : "default"; + var exoticToPrim = GetMethod(input, toPrimitiveSymbol); + if (exoticToPrim !== undefined) { + var result = exoticToPrim.call(input, hint); + if (IsObject(result)) + throw new TypeError(); + return result; } + return OrdinaryToPrimitive(input, hint === "default" ? 
"number" : hint); } - - keys.push(parent); - } - - // Loop through children appending to the array until we hit depth - - var i = 0; - while ((segment = child.exec(key)) !== null && i < options.depth) { - i += 1; - if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { - if (!options.allowPrototypes) { - return; + // 7.1.1.1 OrdinaryToPrimitive(O, hint) + // https://tc39.github.io/ecma262/#sec-ordinarytoprimitive + function OrdinaryToPrimitive(O, hint) { + if (hint === "string") { + var toString_1 = O.toString; + if (IsCallable(toString_1)) { + var result = toString_1.call(O); + if (!IsObject(result)) + return result; + } + var valueOf = O.valueOf; + if (IsCallable(valueOf)) { + var result = valueOf.call(O); + if (!IsObject(result)) + return result; + } + } + else { + var valueOf = O.valueOf; + if (IsCallable(valueOf)) { + var result = valueOf.call(O); + if (!IsObject(result)) + return result; + } + var toString_2 = O.toString; + if (IsCallable(toString_2)) { + var result = toString_2.call(O); + if (!IsObject(result)) + return result; + } } + throw new TypeError(); } - keys.push(segment[1]); - } - - // If there's a remainder, just add whatever is left - - if (segment) { - keys.push('[' + key.slice(segment.index) + ']'); - } - - return parseObject(keys, val, options); -}; - -module.exports = function (str, opts) { - var options = opts ? utils.assign({}, opts) : {}; - - if (options.decoder !== null && options.decoder !== undefined && typeof options.decoder !== 'function') { - throw new TypeError('Decoder has to be a function.'); - } - - options.ignoreQueryPrefix = options.ignoreQueryPrefix === true; - options.delimiter = typeof options.delimiter === 'string' || utils.isRegExp(options.delimiter) ? options.delimiter : defaults.delimiter; - options.depth = typeof options.depth === 'number' ? options.depth : defaults.depth; - options.arrayLimit = typeof options.arrayLimit === 'number' ? options.arrayLimit : defaults.arrayLimit; - options.parseArrays = options.parseArrays !== false; - options.decoder = typeof options.decoder === 'function' ? options.decoder : defaults.decoder; - options.allowDots = typeof options.allowDots === 'boolean' ? options.allowDots : defaults.allowDots; - options.plainObjects = typeof options.plainObjects === 'boolean' ? options.plainObjects : defaults.plainObjects; - options.allowPrototypes = typeof options.allowPrototypes === 'boolean' ? options.allowPrototypes : defaults.allowPrototypes; - options.parameterLimit = typeof options.parameterLimit === 'number' ? options.parameterLimit : defaults.parameterLimit; - options.strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; - - if (str === '' || str === null || typeof str === 'undefined') { - return options.plainObjects ? Object.create(null) : {}; - } - - var tempObj = typeof str === 'string' ? parseValues(str, options) : str; - var obj = options.plainObjects ? 
Object.create(null) : {}; - - // Iterate over the keys and setup the new object - - var keys = Object.keys(tempObj); - for (var i = 0; i < keys.length; ++i) { - var key = keys[i]; - var newObj = parseKeys(key, tempObj[key], options); - obj = utils.merge(obj, newObj, options); - } - - return utils.compact(obj); -}; - - -/***/ }), - -/***/ 79954: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(72360); -var formats = __nccwpck_require__(74907); - -var arrayPrefixGenerators = { - brackets: function brackets(prefix) { // eslint-disable-line func-name-matching - return prefix + '[]'; - }, - indices: function indices(prefix, key) { // eslint-disable-line func-name-matching - return prefix + '[' + key + ']'; - }, - repeat: function repeat(prefix) { // eslint-disable-line func-name-matching - return prefix; - } -}; - -var toISO = Date.prototype.toISOString; - -var defaults = { - delimiter: '&', - encode: true, - encoder: utils.encode, - encodeValuesOnly: false, - serializeDate: function serializeDate(date) { // eslint-disable-line func-name-matching - return toISO.call(date); - }, - skipNulls: false, - strictNullHandling: false -}; - -var stringify = function stringify( // eslint-disable-line func-name-matching - object, - prefix, - generateArrayPrefix, - strictNullHandling, - skipNulls, - encoder, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly -) { - var obj = object; - if (typeof filter === 'function') { - obj = filter(prefix, obj); - } else if (obj instanceof Date) { - obj = serializeDate(obj); - } else if (obj === null) { - if (strictNullHandling) { - return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder) : prefix; + // 7.1.2 ToBoolean(argument) + // https://tc39.github.io/ecma262/2016/#sec-toboolean + function ToBoolean(argument) { + return !!argument; } - - obj = ''; - } - - if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || utils.isBuffer(obj)) { - if (encoder) { - var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder); - return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder))]; + // 7.1.12 ToString(argument) + // https://tc39.github.io/ecma262/#sec-tostring + function ToString(argument) { + return "" + argument; } - return [formatter(prefix) + '=' + formatter(String(obj))]; - } - - var values = []; - - if (typeof obj === 'undefined') { - return values; - } - - var objKeys; - if (Array.isArray(filter)) { - objKeys = filter; - } else { - var keys = Object.keys(obj); - objKeys = sort ? keys.sort(sort) : keys; - } - - for (var i = 0; i < objKeys.length; ++i) { - var key = objKeys[i]; - - if (skipNulls && obj[key] === null) { - continue; + // 7.1.14 ToPropertyKey(argument) + // https://tc39.github.io/ecma262/#sec-topropertykey + function ToPropertyKey(argument) { + var key = ToPrimitive(argument, 3 /* String */); + if (IsSymbol(key)) + return key; + return ToString(key); } - - if (Array.isArray(obj)) { - values = values.concat(stringify( - obj[key], - generateArrayPrefix(prefix, key), - generateArrayPrefix, - strictNullHandling, - skipNulls, - encoder, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly - )); - } else { - values = values.concat(stringify( - obj[key], - prefix + (allowDots ? '.' 
+ key : '[' + key + ']'), - generateArrayPrefix, - strictNullHandling, - skipNulls, - encoder, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly - )); + // 7.2 Testing and Comparison Operations + // https://tc39.github.io/ecma262/#sec-testing-and-comparison-operations + // 7.2.2 IsArray(argument) + // https://tc39.github.io/ecma262/#sec-isarray + function IsArray(argument) { + return Array.isArray + ? Array.isArray(argument) + : argument instanceof Object + ? argument instanceof Array + : Object.prototype.toString.call(argument) === "[object Array]"; } - } - - return values; -}; - -module.exports = function (object, opts) { - var obj = object; - var options = opts ? utils.assign({}, opts) : {}; - - if (options.encoder !== null && options.encoder !== undefined && typeof options.encoder !== 'function') { - throw new TypeError('Encoder has to be a function.'); - } - - var delimiter = typeof options.delimiter === 'undefined' ? defaults.delimiter : options.delimiter; - var strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; - var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : defaults.skipNulls; - var encode = typeof options.encode === 'boolean' ? options.encode : defaults.encode; - var encoder = typeof options.encoder === 'function' ? options.encoder : defaults.encoder; - var sort = typeof options.sort === 'function' ? options.sort : null; - var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots; - var serializeDate = typeof options.serializeDate === 'function' ? options.serializeDate : defaults.serializeDate; - var encodeValuesOnly = typeof options.encodeValuesOnly === 'boolean' ? options.encodeValuesOnly : defaults.encodeValuesOnly; - if (typeof options.format === 'undefined') { - options.format = formats['default']; - } else if (!Object.prototype.hasOwnProperty.call(formats.formatters, options.format)) { - throw new TypeError('Unknown format option provided.'); - } - var formatter = formats.formatters[options.format]; - var objKeys; - var filter; - - if (typeof options.filter === 'function') { - filter = options.filter; - obj = filter('', obj); - } else if (Array.isArray(options.filter)) { - filter = options.filter; - objKeys = filter; - } - - var keys = []; - - if (typeof obj !== 'object' || obj === null) { - return ''; - } - - var arrayFormat; - if (options.arrayFormat in arrayPrefixGenerators) { - arrayFormat = options.arrayFormat; - } else if ('indices' in options) { - arrayFormat = options.indices ? 'indices' : 'repeat'; - } else { - arrayFormat = 'indices'; - } - - var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; - - if (!objKeys) { - objKeys = Object.keys(obj); - } - - if (sort) { - objKeys.sort(sort); - } - - for (var i = 0; i < objKeys.length; ++i) { - var key = objKeys[i]; - - if (skipNulls && obj[key] === null) { - continue; + // 7.2.3 IsCallable(argument) + // https://tc39.github.io/ecma262/#sec-iscallable + function IsCallable(argument) { + // NOTE: This is an approximation as we cannot check for [[Call]] internal method. + return typeof argument === "function"; } - - keys = keys.concat(stringify( - obj[key], - key, - generateArrayPrefix, - strictNullHandling, - skipNulls, - encode ? encoder : null, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly - )); - } - - var joined = keys.join(delimiter); - var prefix = options.addQueryPrefix === true ? '?' 
: ''; - - return joined.length > 0 ? prefix + joined : ''; -}; - - -/***/ }), - -/***/ 72360: -/***/ ((module) => { - -"use strict"; - - -var has = Object.prototype.hasOwnProperty; - -var hexTable = (function () { - var array = []; - for (var i = 0; i < 256; ++i) { - array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); - } - - return array; -}()); - -var compactQueue = function compactQueue(queue) { - var obj; - - while (queue.length) { - var item = queue.pop(); - obj = item.obj[item.prop]; - - if (Array.isArray(obj)) { - var compacted = []; - - for (var j = 0; j < obj.length; ++j) { - if (typeof obj[j] !== 'undefined') { - compacted.push(obj[j]); - } + // 7.2.4 IsConstructor(argument) + // https://tc39.github.io/ecma262/#sec-isconstructor + function IsConstructor(argument) { + // NOTE: This is an approximation as we cannot check for [[Construct]] internal method. + return typeof argument === "function"; + } + // 7.2.7 IsPropertyKey(argument) + // https://tc39.github.io/ecma262/#sec-ispropertykey + function IsPropertyKey(argument) { + switch (Type(argument)) { + case 3 /* String */: return true; + case 4 /* Symbol */: return true; + default: return false; } - - item.obj[item.prop] = compacted; } - } - - return obj; -}; - -var arrayToObject = function arrayToObject(source, options) { - var obj = options && options.plainObjects ? Object.create(null) : {}; - for (var i = 0; i < source.length; ++i) { - if (typeof source[i] !== 'undefined') { - obj[i] = source[i]; + // 7.3 Operations on Objects + // https://tc39.github.io/ecma262/#sec-operations-on-objects + // 7.3.9 GetMethod(V, P) + // https://tc39.github.io/ecma262/#sec-getmethod + function GetMethod(V, P) { + var func = V[P]; + if (func === undefined || func === null) + return undefined; + if (!IsCallable(func)) + throw new TypeError(); + return func; } - } - - return obj; -}; - -var merge = function merge(target, source, options) { - if (!source) { - return target; - } - - if (typeof source !== 'object') { - if (Array.isArray(target)) { - target.push(source); - } else if (typeof target === 'object') { - if (options.plainObjects || options.allowPrototypes || !has.call(Object.prototype, source)) { - target[source] = true; - } - } else { - return [target, source]; + // 7.4 Operations on Iterator Objects + // https://tc39.github.io/ecma262/#sec-operations-on-iterator-objects + function GetIterator(obj) { + var method = GetMethod(obj, iteratorSymbol); + if (!IsCallable(method)) + throw new TypeError(); // from Call + var iterator = method.call(obj); + if (!IsObject(iterator)) + throw new TypeError(); + return iterator; } - - return target; - } - - if (typeof target !== 'object') { - return [target].concat(source); - } - - var mergeTarget = target; - if (Array.isArray(target) && !Array.isArray(source)) { - mergeTarget = arrayToObject(target, options); - } - - if (Array.isArray(target) && Array.isArray(source)) { - source.forEach(function (item, i) { - if (has.call(target, i)) { - if (target[i] && typeof target[i] === 'object') { - target[i] = merge(target[i], item, options); - } else { - target.push(item); - } - } else { - target[i] = item; - } - }); - return target; - } - - return Object.keys(source).reduce(function (acc, key) { - var value = source[key]; - - if (has.call(acc, key)) { - acc[key] = merge(acc[key], value, options); - } else { - acc[key] = value; + // 7.4.4 IteratorValue(iterResult) + // https://tc39.github.io/ecma262/2016/#sec-iteratorvalue + function IteratorValue(iterResult) { + return iterResult.value; } - 
return acc; - }, mergeTarget); -}; - -var assign = function assignSingleSource(target, source) { - return Object.keys(source).reduce(function (acc, key) { - acc[key] = source[key]; - return acc; - }, target); -}; - -var decode = function (str) { - try { - return decodeURIComponent(str.replace(/\+/g, ' ')); - } catch (e) { - return str; - } -}; - -var encode = function encode(str) { - // This code was originally written by Brian White (mscdex) for the io.js core querystring library. - // It has been adapted here for stricter adherence to RFC 3986 - if (str.length === 0) { - return str; - } - - var string = typeof str === 'string' ? str : String(str); - - var out = ''; - for (var i = 0; i < string.length; ++i) { - var c = string.charCodeAt(i); - - if ( - c === 0x2D // - - || c === 0x2E // . - || c === 0x5F // _ - || c === 0x7E // ~ - || (c >= 0x30 && c <= 0x39) // 0-9 - || (c >= 0x41 && c <= 0x5A) // a-z - || (c >= 0x61 && c <= 0x7A) // A-Z - ) { - out += string.charAt(i); - continue; + // 7.4.5 IteratorStep(iterator) + // https://tc39.github.io/ecma262/#sec-iteratorstep + function IteratorStep(iterator) { + var result = iterator.next(); + return result.done ? false : result; } - - if (c < 0x80) { - out = out + hexTable[c]; - continue; + // 7.4.6 IteratorClose(iterator, completion) + // https://tc39.github.io/ecma262/#sec-iteratorclose + function IteratorClose(iterator) { + var f = iterator["return"]; + if (f) + f.call(iterator); } - - if (c < 0x800) { - out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); - continue; + // 9.1 Ordinary Object Internal Methods and Internal Slots + // https://tc39.github.io/ecma262/#sec-ordinary-object-internal-methods-and-internal-slots + // 9.1.1.1 OrdinaryGetPrototypeOf(O) + // https://tc39.github.io/ecma262/#sec-ordinarygetprototypeof + function OrdinaryGetPrototypeOf(O) { + var proto = Object.getPrototypeOf(O); + if (typeof O !== "function" || O === functionPrototype) + return proto; + // TypeScript doesn't set __proto__ in ES5, as it's non-standard. + // Try to determine the superclass constructor. Compatible implementations + // must either set __proto__ on a subclass constructor to the superclass constructor, + // or ensure each class has a valid `constructor` property on its prototype that + // points back to the constructor. + // If this is not the same as Function.[[Prototype]], then this is definately inherited. + // This is the case when in ES6 or when using __proto__ in a compatible browser. + if (proto !== functionPrototype) + return proto; + // If the super prototype is Object.prototype, null, or undefined, then we cannot determine the heritage. + var prototype = O.prototype; + var prototypeProto = prototype && Object.getPrototypeOf(prototype); + if (prototypeProto == null || prototypeProto === Object.prototype) + return proto; + // If the constructor was not a function, then we cannot determine the heritage. + var constructor = prototypeProto.constructor; + if (typeof constructor !== "function") + return proto; + // If we have some kind of self-reference, then we cannot determine the heritage. + if (constructor === O) + return proto; + // we have a pretty good guess at the heritage. 
+ return constructor; } - - if (c < 0xD800 || c >= 0xE000) { - out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); - continue; + // naive Map shim + function CreateMapPolyfill() { + var cacheSentinel = {}; + var arraySentinel = []; + var MapIterator = /** @class */ (function () { + function MapIterator(keys, values, selector) { + this._index = 0; + this._keys = keys; + this._values = values; + this._selector = selector; + } + MapIterator.prototype["@@iterator"] = function () { return this; }; + MapIterator.prototype[iteratorSymbol] = function () { return this; }; + MapIterator.prototype.next = function () { + var index = this._index; + if (index >= 0 && index < this._keys.length) { + var result = this._selector(this._keys[index], this._values[index]); + if (index + 1 >= this._keys.length) { + this._index = -1; + this._keys = arraySentinel; + this._values = arraySentinel; + } + else { + this._index++; + } + return { value: result, done: false }; + } + return { value: undefined, done: true }; + }; + MapIterator.prototype.throw = function (error) { + if (this._index >= 0) { + this._index = -1; + this._keys = arraySentinel; + this._values = arraySentinel; + } + throw error; + }; + MapIterator.prototype.return = function (value) { + if (this._index >= 0) { + this._index = -1; + this._keys = arraySentinel; + this._values = arraySentinel; + } + return { value: value, done: true }; + }; + return MapIterator; + }()); + return /** @class */ (function () { + function Map() { + this._keys = []; + this._values = []; + this._cacheKey = cacheSentinel; + this._cacheIndex = -2; + } + Object.defineProperty(Map.prototype, "size", { + get: function () { return this._keys.length; }, + enumerable: true, + configurable: true + }); + Map.prototype.has = function (key) { return this._find(key, /*insert*/ false) >= 0; }; + Map.prototype.get = function (key) { + var index = this._find(key, /*insert*/ false); + return index >= 0 ? 
this._values[index] : undefined; + }; + Map.prototype.set = function (key, value) { + var index = this._find(key, /*insert*/ true); + this._values[index] = value; + return this; + }; + Map.prototype.delete = function (key) { + var index = this._find(key, /*insert*/ false); + if (index >= 0) { + var size = this._keys.length; + for (var i = index + 1; i < size; i++) { + this._keys[i - 1] = this._keys[i]; + this._values[i - 1] = this._values[i]; + } + this._keys.length--; + this._values.length--; + if (key === this._cacheKey) { + this._cacheKey = cacheSentinel; + this._cacheIndex = -2; + } + return true; + } + return false; + }; + Map.prototype.clear = function () { + this._keys.length = 0; + this._values.length = 0; + this._cacheKey = cacheSentinel; + this._cacheIndex = -2; + }; + Map.prototype.keys = function () { return new MapIterator(this._keys, this._values, getKey); }; + Map.prototype.values = function () { return new MapIterator(this._keys, this._values, getValue); }; + Map.prototype.entries = function () { return new MapIterator(this._keys, this._values, getEntry); }; + Map.prototype["@@iterator"] = function () { return this.entries(); }; + Map.prototype[iteratorSymbol] = function () { return this.entries(); }; + Map.prototype._find = function (key, insert) { + if (this._cacheKey !== key) { + this._cacheIndex = this._keys.indexOf(this._cacheKey = key); + } + if (this._cacheIndex < 0 && insert) { + this._cacheIndex = this._keys.length; + this._keys.push(key); + this._values.push(undefined); + } + return this._cacheIndex; + }; + return Map; + }()); + function getKey(key, _) { + return key; + } + function getValue(_, value) { + return value; + } + function getEntry(key, value) { + return [key, value]; + } } - - i += 1; - c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); - out += hexTable[0xF0 | (c >> 18)] - + hexTable[0x80 | ((c >> 12) & 0x3F)] - + hexTable[0x80 | ((c >> 6) & 0x3F)] - + hexTable[0x80 | (c & 0x3F)]; - } - - return out; -}; - -var compact = function compact(value) { - var queue = [{ obj: { o: value }, prop: 'o' }]; - var refs = []; - - for (var i = 0; i < queue.length; ++i) { - var item = queue[i]; - var obj = item.obj[item.prop]; - - var keys = Object.keys(obj); - for (var j = 0; j < keys.length; ++j) { - var key = keys[j]; - var val = obj[key]; - if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { - queue.push({ obj: obj, prop: key }); - refs.push(val); + // naive Set shim + function CreateSetPolyfill() { + return /** @class */ (function () { + function Set() { + this._map = new _Map(); + } + Object.defineProperty(Set.prototype, "size", { + get: function () { return this._map.size; }, + enumerable: true, + configurable: true + }); + Set.prototype.has = function (value) { return this._map.has(value); }; + Set.prototype.add = function (value) { return this._map.set(value, value), this; }; + Set.prototype.delete = function (value) { return this._map.delete(value); }; + Set.prototype.clear = function () { this._map.clear(); }; + Set.prototype.keys = function () { return this._map.keys(); }; + Set.prototype.values = function () { return this._map.values(); }; + Set.prototype.entries = function () { return this._map.entries(); }; + Set.prototype["@@iterator"] = function () { return this.keys(); }; + Set.prototype[iteratorSymbol] = function () { return this.keys(); }; + return Set; + }()); + } + // naive WeakMap shim + function CreateWeakMapPolyfill() { + var UUID_SIZE = 16; + var keys = HashMap.create(); + var rootKey = 
CreateUniqueKey(); + return /** @class */ (function () { + function WeakMap() { + this._key = CreateUniqueKey(); + } + WeakMap.prototype.has = function (target) { + var table = GetOrCreateWeakMapTable(target, /*create*/ false); + return table !== undefined ? HashMap.has(table, this._key) : false; + }; + WeakMap.prototype.get = function (target) { + var table = GetOrCreateWeakMapTable(target, /*create*/ false); + return table !== undefined ? HashMap.get(table, this._key) : undefined; + }; + WeakMap.prototype.set = function (target, value) { + var table = GetOrCreateWeakMapTable(target, /*create*/ true); + table[this._key] = value; + return this; + }; + WeakMap.prototype.delete = function (target) { + var table = GetOrCreateWeakMapTable(target, /*create*/ false); + return table !== undefined ? delete table[this._key] : false; + }; + WeakMap.prototype.clear = function () { + // NOTE: not a real clear, just makes the previous data unreachable + this._key = CreateUniqueKey(); + }; + return WeakMap; + }()); + function CreateUniqueKey() { + var key; + do + key = "@@WeakMap@@" + CreateUUID(); + while (HashMap.has(keys, key)); + keys[key] = true; + return key; + } + function GetOrCreateWeakMapTable(target, create) { + if (!hasOwn.call(target, rootKey)) { + if (!create) + return undefined; + Object.defineProperty(target, rootKey, { value: HashMap.create() }); + } + return target[rootKey]; + } + function FillRandomBytes(buffer, size) { + for (var i = 0; i < size; ++i) + buffer[i] = Math.random() * 0xff | 0; + return buffer; + } + function GenRandomBytes(size) { + if (typeof Uint8Array === "function") { + if (typeof crypto !== "undefined") + return crypto.getRandomValues(new Uint8Array(size)); + if (typeof msCrypto !== "undefined") + return msCrypto.getRandomValues(new Uint8Array(size)); + return FillRandomBytes(new Uint8Array(size), size); + } + return FillRandomBytes(new Array(size), size); + } + function CreateUUID() { + var data = GenRandomBytes(UUID_SIZE); + // mark as random - RFC 4122 § 4.4 + data[6] = data[6] & 0x4f | 0x40; + data[8] = data[8] & 0xbf | 0x80; + var result = ""; + for (var offset = 0; offset < UUID_SIZE; ++offset) { + var byte = data[offset]; + if (offset === 4 || offset === 6 || offset === 8) + result += "-"; + if (byte < 16) + result += "0"; + result += byte.toString(16).toLowerCase(); + } + return result; } } - } - - return compactQueue(queue); -}; - -var isRegExp = function isRegExp(obj) { - return Object.prototype.toString.call(obj) === '[object RegExp]'; -}; - -var isBuffer = function isBuffer(obj) { - if (obj === null || typeof obj === 'undefined') { - return false; - } - - return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); -}; - -module.exports = { - arrayToObject: arrayToObject, - assign: assign, - compact: compact, - decode: decode, - encode: encode, - isBuffer: isBuffer, - isRegExp: isRegExp, - merge: merge -}; - - -/***/ }), - -/***/ 67214: -/***/ ((module) => { - -"use strict"; - - -const codes = {}; - -function createErrorType(code, message, Base) { - if (!Base) { - Base = Error - } - - function getMessage (arg1, arg2, arg3) { - if (typeof message === 'string') { - return message - } else { - return message(arg1, arg2, arg3) - } - } - - class NodeError extends Base { - constructor (arg1, arg2, arg3) { - super(getMessage(arg1, arg2, arg3)); - } - } - - NodeError.prototype.name = Base.name; - NodeError.prototype.code = code; - - codes[code] = NodeError; -} - -// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js 
-function oneOf(expected, thing) { - if (Array.isArray(expected)) { - const len = expected.length; - expected = expected.map((i) => String(i)); - if (len > 2) { - return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + - expected[len - 1]; - } else if (len === 2) { - return `one of ${thing} ${expected[0]} or ${expected[1]}`; - } else { - return `of ${thing} ${expected[0]}`; - } - } else { - return `of ${thing} ${String(expected)}`; - } -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith -function startsWith(str, search, pos) { - return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith -function endsWith(str, search, this_len) { - if (this_len === undefined || this_len > str.length) { - this_len = str.length; - } - return str.substring(this_len - search.length, this_len) === search; -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes -function includes(str, search, start) { - if (typeof start !== 'number') { - start = 0; - } - - if (start + search.length > str.length) { - return false; - } else { - return str.indexOf(search, start) !== -1; - } -} - -createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { - return 'The value "' + value + '" is invalid for option "' + name + '"' -}, TypeError); -createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { - // determiner: 'must be' or 'must not be' - let determiner; - if (typeof expected === 'string' && startsWith(expected, 'not ')) { - determiner = 'must not be'; - expected = expected.replace(/^not /, ''); - } else { - determiner = 'must be'; - } - - let msg; - if (endsWith(name, ' argument')) { - // For cases like 'first argument' - msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; - } else { - const type = includes(name, '.') ? 'property' : 'argument'; - msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; - } - - msg += `. Received type ${typeof actual}`; - return msg; -}, TypeError); -createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); -createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { - return 'The ' + name + ' method is not implemented' -}); -createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); -createErrorType('ERR_STREAM_DESTROYED', function (name) { - return 'Cannot call ' + name + ' after a stream was destroyed'; -}); -createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); -createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); -createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); -createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { - return 'Unknown encoding: ' + arg -}, TypeError); -createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); - -module.exports.q = codes; + // uses a heuristic used by v8 and chakra to force an object into dictionary mode. + function MakeDictionary(obj) { + obj.__ = undefined; + delete obj.__; + return obj; + } + }); +})(Reflect || (Reflect = {})); /***/ }), -/***/ 41359: +/***/ 48699: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. +// Copyright 2010-2012 Mikeal Rogers // -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. - -/**/ - -var objectKeys = Object.keys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. -module.exports = Duplex; -var Readable = __nccwpck_require__(51433); -var Writable = __nccwpck_require__(26993); +var extend = __nccwpck_require__(38171) +var cookies = __nccwpck_require__(50976) +var helpers = __nccwpck_require__(74845) -__nccwpck_require__(44124)(Duplex, Readable); +var paramsHaveRequestBody = helpers.paramsHaveRequestBody -{ - // Allow the keys array to be GC'ed. 
- var keys = objectKeys(Writable.prototype); +// organize params for patch, post, put, head, del +function initParams (uri, options, callback) { + if (typeof options === 'function') { + callback = options + } - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + var params = {} + if (options !== null && typeof options === 'object') { + extend(params, options, {uri: uri}) + } else if (typeof uri === 'string') { + extend(params, {uri: uri}) + } else { + extend(params, uri) } + + params.callback = callback || params.callback + return params } -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - Readable.call(this, options); - Writable.call(this, options); - this.allowHalfOpen = true; +function request (uri, options, callback) { + if (typeof uri === 'undefined') { + throw new Error('undefined is not a valid uri or options object.') + } - if (options) { - if (options.readable === false) this.readable = false; - if (options.writable === false) this.writable = false; + var params = initParams(uri, options, callback) - if (options.allowHalfOpen === false) { - this.allowHalfOpen = false; - this.once('end', onend); - } + if (params.method === 'HEAD' && paramsHaveRequestBody(params)) { + throw new Error('HTTP HEAD requests MUST NOT include a request body.') } + + return new request.Request(params) } -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; - } -}); -Object.defineProperty(Duplex.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); -Object.defineProperty(Duplex.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.length; +function verbFunc (verb) { + var method = verb.toUpperCase() + return function (uri, options, callback) { + var params = initParams(uri, options, callback) + params.method = method + return request(params, params.callback) } -}); // the no-half-open enforcer +} -function onend() { - // If the writable side ended, then we're ok. - if (this._writableState.ended) return; // no more data can be written. - // But allow more writes to happen in this tick. 
+// define like this to please codeintel/intellisense IDEs +request.get = verbFunc('get') +request.head = verbFunc('head') +request.options = verbFunc('options') +request.post = verbFunc('post') +request.put = verbFunc('put') +request.patch = verbFunc('patch') +request.del = verbFunc('delete') +request['delete'] = verbFunc('delete') - process.nextTick(onEndNT, this); +request.jar = function (store) { + return cookies.jar(store) } -function onEndNT(self) { - self.end(); +request.cookie = function (str) { + return cookies.parse(str) } -Object.defineProperty(Duplex.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined || this._writableState === undefined) { - return false; - } +function wrapRequestMethod (method, options, requester, verb) { + return function (uri, opts, callback) { + var params = initParams(uri, opts, callback) - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed + var target = {} + extend(true, target, options, params) + + target.pool = params.pool || options.pool + if (verb) { + target.method = verb.toUpperCase() + } - this._readableState.destroyed = value; - this._writableState.destroyed = value; + if (typeof requester === 'function') { + method = requester + } + + return method(target, target.callback) } -}); +} -/***/ }), +request.defaults = function (options, requester) { + var self = this -/***/ 81542: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + options = options || {} -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. 
+ if (typeof options === 'function') { + requester = options + options = {} + } + var defaults = wrapRequestMethod(self, options, requester) -module.exports = PassThrough; + var verbs = ['get', 'head', 'post', 'put', 'patch', 'del', 'delete'] + verbs.forEach(function (verb) { + defaults[verb] = wrapRequestMethod(self[verb], options, requester, verb) + }) -var Transform = __nccwpck_require__(34415); + defaults.cookie = wrapRequestMethod(self.cookie, options, requester) + defaults.jar = self.jar + defaults.defaults = self.defaults + return defaults +} -__nccwpck_require__(44124)(PassThrough, Transform); +request.forever = function (agentOptions, optionsArg) { + var options = {} + if (optionsArg) { + extend(options, optionsArg) + } + if (agentOptions) { + options.agentOptions = agentOptions + } -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - Transform.call(this, options); + options.forever = true + return request.defaults(options) } -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; +// Exports -/***/ }), +module.exports = request +request.Request = __nccwpck_require__(70304) +request.initParams = initParams -/***/ 51433: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// Backwards compatibility for request.debug +Object.defineProperty(request, 'debug', { + enumerable: true, + get: function () { + return request.Request.debug + }, + set: function (debug) { + request.Request.debug = debug + } +}) -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+/***/ }), -module.exports = Readable; -/**/ +/***/ 76996: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var Duplex; -/**/ +"use strict"; -Readable.ReadableState = ReadableState; -/**/ -var EE = __nccwpck_require__(28614).EventEmitter; +var caseless = __nccwpck_require__(35684) +var uuid = __nccwpck_require__(71435) +var helpers = __nccwpck_require__(74845) -var EElistenerCount = function EElistenerCount(emitter, type) { - return emitter.listeners(type).length; -}; -/**/ +var md5 = helpers.md5 +var toBase64 = helpers.toBase64 -/**/ +function Auth (request) { + // define all public properties here + this.request = request + this.hasAuth = false + this.sentAuth = false + this.bearerToken = null + this.user = null + this.pass = null +} +Auth.prototype.basic = function (user, pass, sendImmediately) { + var self = this + if (typeof user !== 'string' || (pass !== undefined && typeof pass !== 'string')) { + self.request.emit('error', new Error('auth() received invalid user or password')) + } + self.user = user + self.pass = pass + self.hasAuth = true + var header = user + ':' + (pass || '') + if (sendImmediately || typeof sendImmediately === 'undefined') { + var authHeader = 'Basic ' + toBase64(header) + self.sentAuth = true + return authHeader + } +} -var Stream = __nccwpck_require__(62387); -/**/ +Auth.prototype.bearer = function (bearer, sendImmediately) { + var self = this + self.bearerToken = bearer + self.hasAuth = true + if (sendImmediately || typeof sendImmediately === 'undefined') { + if (typeof bearer === 'function') { + bearer = bearer() + } + var authHeader = 'Bearer ' + (bearer || '') + self.sentAuth = true + return authHeader + } +} +Auth.prototype.digest = function (method, path, authHeader) { + // TODO: More complete implementation of RFC 2617. + // - handle challenge.domain + // - support qop="auth-int" only + // - handle Authentication-Info (not necessarily?) + // - check challenge.stale (not necessarily?) + // - increase nc (not necessarily?) + // For reference: + // http://tools.ietf.org/html/rfc2617#section-3 + // https://github.com/bagder/curl/blob/master/lib/http_digest.c -var Buffer = __nccwpck_require__(64293).Buffer; + var self = this -var OurUint8Array = global.Uint8Array || function () {}; + var challenge = {} + var re = /([a-z0-9_-]+)=(?:"([^"]+)"|([a-z0-9_-]+))/gi + while (true) { + var match = re.exec(authHeader) + if (!match) { + break + } + challenge[match[1]] = match[2] || match[3] + } -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} + /** + * RFC 2617: handle both MD5 and MD5-sess algorithms. + * + * If the algorithm directive's value is "MD5" or unspecified, then HA1 is + * HA1=MD5(username:realm:password) + * If the algorithm directive's value is "MD5-sess", then HA1 is + * HA1=MD5(MD5(username:realm:password):nonce:cnonce) + */ + var ha1Compute = function (algorithm, user, realm, pass, nonce, cnonce) { + var ha1 = md5(user + ':' + realm + ':' + pass) + if (algorithm && algorithm.toLowerCase() === 'md5-sess') { + return md5(ha1 + ':' + nonce + ':' + cnonce) + } else { + return ha1 + } + } -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; + var qop = /(^|,)\s*auth\s*($|,)/.test(challenge.qop) && 'auth' + var nc = qop && '00000001' + var cnonce = qop && uuid().replace(/-/g, '') + var ha1 = ha1Compute(challenge.algorithm, self.user, challenge.realm, self.pass, challenge.nonce, cnonce) + var ha2 = md5(method + ':' + path) + var digestResponse = qop + ? 
md5(ha1 + ':' + challenge.nonce + ':' + nc + ':' + cnonce + ':' + qop + ':' + ha2) + : md5(ha1 + ':' + challenge.nonce + ':' + ha2) + var authValues = { + username: self.user, + realm: challenge.realm, + nonce: challenge.nonce, + uri: path, + qop: qop, + response: digestResponse, + nc: nc, + cnonce: cnonce, + algorithm: challenge.algorithm, + opaque: challenge.opaque + } + + authHeader = [] + for (var k in authValues) { + if (authValues[k]) { + if (k === 'qop' || k === 'nc' || k === 'algorithm') { + authHeader.push(k + '=' + authValues[k]) + } else { + authHeader.push(k + '="' + authValues[k] + '"') + } + } + } + authHeader = 'Digest ' + authHeader.join(', ') + self.sentAuth = true + return authHeader } -/**/ +Auth.prototype.onRequest = function (user, pass, sendImmediately, bearer) { + var self = this + var request = self.request -var debugUtil = __nccwpck_require__(31669); + var authHeader + if (bearer === undefined && user === undefined) { + self.request.emit('error', new Error('no auth mechanism defined')) + } else if (bearer !== undefined) { + authHeader = self.bearer(bearer, sendImmediately) + } else { + authHeader = self.basic(user, pass, sendImmediately) + } + if (authHeader) { + request.setHeader('authorization', authHeader) + } +} -var debug; +Auth.prototype.onResponse = function (response) { + var self = this + var request = self.request -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function debug() {}; -} -/**/ + if (!self.hasAuth || self.sentAuth) { return null } + var c = caseless(response.headers) -var BufferList = __nccwpck_require__(52746); + var authHeader = c.get('www-authenticate') + var authVerb = authHeader && authHeader.split(' ')[0].toLowerCase() + request.debug('reauth', authVerb) -var destroyImpl = __nccwpck_require__(97049); + switch (authVerb) { + case 'basic': + return self.basic(self.user, self.pass, true) -var _require = __nccwpck_require__(39948), - getHighWaterMark = _require.getHighWaterMark; + case 'bearer': + return self.bearer(self.bearerToken, true) -var _require$codes = __nccwpck_require__(67214)/* .codes */ .q, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. + case 'digest': + return self.digest(request.method, request.path, authHeader) + } +} +exports.g = Auth -var StringDecoder; -var createReadableStreamAsyncIterator; -var from; -__nccwpck_require__(44124)(Readable, Stream); +/***/ }), -var errorOrDestroy = destroyImpl.errorOrDestroy; -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +/***/ 50976: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. 
+"use strict"; - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; -} -function ReadableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(41359); - options = options || {}; // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. +var tough = __nccwpck_require__(47279) - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away +var Cookie = tough.Cookie +var CookieJar = tough.CookieJar - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" +exports.parse = function (str) { + if (str && str.uri) { + str = str.uri + } + if (typeof str !== 'string') { + throw new Error('The cookie function only accepts STRING as param') + } + return Cookie.parse(str, {loose: true}) +} - this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() +// Adapt the sometimes-Async api of tough.CookieJar to our requirements +function RequestJar (store) { + var self = this + self._jar = new CookieJar(store, {looseMode: true}) +} +RequestJar.prototype.setCookie = function (cookieOrStr, uri, options) { + var self = this + return self._jar.setCookieSync(cookieOrStr, uri, options || {}) +} +RequestJar.prototype.getCookieString = function (uri) { + var self = this + return self._jar.getCookieStringSync(uri) +} +RequestJar.prototype.getCookies = function (uri) { + var self = this + return self._jar.getCookiesSync(uri) +} - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. +exports.jar = function (store) { + return new RequestJar(store) +} - this.sync = true; // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - this.paused = true; // Should close be emitted on destroy. Defaults to true. +/***/ }), - this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') +/***/ 75654: +/***/ ((module) => { - this.autoDestroy = !!options.autoDestroy; // has it been destroyed +"use strict"; - this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. 
- // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s +function formatHostname (hostname) { + // canonicalize the hostname, so that 'oogle.com' won't match 'google.com' + return hostname.replace(/^\.*/, '.').toLowerCase() +} - this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled +function parseNoProxyZone (zone) { + zone = zone.trim().toLowerCase() - this.readingMore = false; - this.decoder = null; - this.encoding = null; + var zoneParts = zone.split(':', 2) + var zoneHost = formatHostname(zoneParts[0]) + var zonePort = zoneParts[1] + var hasPort = zone.indexOf(':') > -1 - if (options.encoding) { - if (!StringDecoder) StringDecoder = __nccwpck_require__(94841)/* .StringDecoder */ .s; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } + return {hostname: zoneHost, port: zonePort, hasPort: hasPort} } -function Readable(options) { - Duplex = Duplex || __nccwpck_require__(41359); - if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5 - - var isDuplex = this instanceof Duplex; - this._readableState = new ReadableState(options, this, isDuplex); // legacy +function uriInNoProxy (uri, noProxy) { + var port = uri.port || (uri.protocol === 'https:' ? '443' : '80') + var hostname = formatHostname(uri.hostname) + var noProxyList = noProxy.split(',') - this.readable = true; + // iterate through the noProxyList until it finds a match. + return noProxyList.map(parseNoProxyZone).some(function (noProxyZone) { + var isMatchedAt = hostname.indexOf(noProxyZone.hostname) + var hostnameMatched = ( + isMatchedAt > -1 && + (isMatchedAt === hostname.length - noProxyZone.hostname.length) + ) - if (options) { - if (typeof options.read === 'function') this._read = options.read; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } + if (noProxyZone.hasPort) { + return (port === noProxyZone.port) && hostnameMatched + } - Stream.call(this); + return hostnameMatched + }) } -Object.defineProperty(Readable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined) { - return false; - } +function getProxyFromURI (uri) { + // Decide the proper request proxy to use based on the request URI object and the + // environmental variables (NO_PROXY, HTTP_PROXY, etc.) + // respect NO_PROXY environment variables (see: https://lynx.invisible-island.net/lynx2.8.7/breakout/lynx_help/keystrokes/environments.html) - return this._readableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed + var noProxy = process.env.NO_PROXY || process.env.no_proxy || '' + // if the noProxy is a wildcard then return null - this._readableState.destroyed = value; + if (noProxy === '*') { + return null } -}); -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; - -Readable.prototype._destroy = function (err, cb) { - cb(err); -}; // Manually shove something into the read() buffer. 
-// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. + // if the noProxy is not empty and the uri is found return null -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; + if (noProxy !== '' && uriInNoProxy(uri, noProxy)) { + return null + } - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; + // Check for HTTP or HTTPS Proxy in environment Else default to null - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } + if (uri.protocol === 'http:') { + return process.env.HTTP_PROXY || + process.env.http_proxy || null + } - skipChunkCheck = true; - } - } else { - skipChunkCheck = true; + if (uri.protocol === 'https:') { + return process.env.HTTPS_PROXY || + process.env.https_proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || null } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; // Unshift should *always* be something directly out of read() + // if none of that works, return null + // (What uri protocol are you using then?) + return null +} -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; +module.exports = getProxyFromURI -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - debug('readableAddChunk', chunk); - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); +/***/ }), - if (er) { - errorOrDestroy(stream, er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } +/***/ 3248: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (addToFront) { - if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); - } else if (state.destroyed) { - return false; - } else { - state.reading = false; +"use strict"; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - maybeReadMore(stream, state); - } - } // We can push more data if we are below the highWaterMark. - // Also, if we have no data yet, we can stand some more bytes. - // This is to work around cases where hwm=0, such as the repl. +var fs = __nccwpck_require__(35747) +var qs = __nccwpck_require__(71191) +var validate = __nccwpck_require__(75697) +var extend = __nccwpck_require__(38171) - return !state.ended && (state.length < state.highWaterMark || state.length === 0); +function Har (request) { + this.request = request } -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - state.awaitDrain = 0; - stream.emit('data', chunk); - } else { - // update the buffer info. - state.length += state.objectMode ? 
1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream); +Har.prototype.reducer = function (obj, pair) { + // new property ? + if (obj[pair.name] === undefined) { + obj[pair.name] = pair.value + return obj } - maybeReadMore(stream, state); -} - -function chunkInvalid(state, chunk) { - var er; + // existing? convert to array + var arr = [ + obj[pair.name], + pair.value + ] - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); - } + obj[pair.name] = arr - return er; + return obj } -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; // backwards compatibility. +Har.prototype.prep = function (data) { + // construct utility properties + data.queryObj = {} + data.headersObj = {} + data.postData.jsonObj = false + data.postData.paramsObj = false + // construct query objects + if (data.queryString && data.queryString.length) { + data.queryObj = data.queryString.reduce(this.reducer, {}) + } -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = __nccwpck_require__(94841)/* .StringDecoder */ .s; - var decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 + // construct headers objects + if (data.headers && data.headers.length) { + // loweCase header keys + data.headersObj = data.headers.reduceRight(function (headers, header) { + headers[header.name] = header.value + return headers + }, {}) + } - this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: + // construct Cookie header + if (data.cookies && data.cookies.length) { + var cookies = data.cookies.map(function (cookie) { + return cookie.name + '=' + cookie.value + }) - var p = this._readableState.buffer.head; - var content = ''; + if (cookies.length) { + data.headersObj.cookie = cookies.join('; ') + } + } - while (p !== null) { - content += decoder.write(p.data); - p = p.next; + // prep body + function some (arr) { + return arr.some(function (type) { + return data.postData.mimeType.indexOf(type) === 0 + }) } - this._readableState.buffer.clear(); + if (some([ + 'multipart/mixed', + 'multipart/related', + 'multipart/form-data', + 'multipart/alternative'])) { + // reset values + data.postData.mimeType = 'multipart/form-data' + } else if (some([ + 'application/x-www-form-urlencoded'])) { + if (!data.postData.params) { + data.postData.text = '' + } else { + data.postData.paramsObj = data.postData.params.reduce(this.reducer, {}) - if (content !== '') this._readableState.buffer.push(content); - this._readableState.length = content.length; - return this; -}; // Don't raise the hwm > 1GB + // always overwrite + data.postData.text = qs.stringify(data.postData.paramsObj) + } + } else if (some([ + 'text/json', + 'text/x-json', + 'application/json', + 'application/x-json'])) { + data.postData.mimeType = 'application/json' + if (data.postData.text) { + try { + data.postData.jsonObj = JSON.parse(data.postData.text) + } catch (e) { + this.request.debug(e) -var MAX_HWM = 0x40000000; + // force back to text/plain + data.postData.mimeType = 'text/plain' + } + } + } -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. 
- n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; + return data +} + +Har.prototype.options = function (options) { + // skip if no har property defined + if (!options.har) { + return options } - return n; -} // This function is designed to be inlinable, so please take care when making -// changes to the function body. + var har = {} + extend(har, options.har) + // only process the first entry + if (har.log && har.log.entries) { + har = har.log.entries[0] + } -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; + // add optional properties to make validation successful + har.url = har.url || options.url || options.uri || options.baseUrl || '/' + har.httpVersion = har.httpVersion || 'HTTP/1.1' + har.queryString = har.queryString || [] + har.headers = har.headers || [] + har.cookies = har.cookies || [] + har.postData = har.postData || {} + har.postData.mimeType = har.postData.mimeType || 'application/octet-stream' - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } // If we're asking for more than the current hwm, then raise the hwm. + har.bodySize = 0 + har.headersSize = 0 + har.postData.size = 0 + if (!validate.request(har)) { + return options + } - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; // Don't have enough + // clean up and get some utility properties + var req = this.prep(har) - if (!state.ended) { - state.needReadable = true; - return 0; + // construct new options + if (req.url) { + options.url = req.url } - return state.length; -} // you can override either this method, or the async _read(n) below. + if (req.method) { + options.method = req.method + } + if (Object.keys(req.queryObj).length) { + options.qs = req.queryObj + } -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. + if (Object.keys(req.headersObj).length) { + options.headers = req.headersObj + } - if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; + function test (type) { + return req.postData.mimeType.indexOf(type) === 0 } + if (test('application/x-www-form-urlencoded')) { + options.form = req.postData.paramsObj + } else if (test('application/json')) { + if (req.postData.jsonObj) { + options.body = req.postData.jsonObj + options.json = true + } + } else if (test('multipart/form-data')) { + options.formData = {} - n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. + req.postData.params.forEach(function (param) { + var attachment = {} - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } // All the actual chunk generation logic needs to be - // *below* the call to _read. 
The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - // if we need a readable event, then we need to do some reading. + if (!param.fileName && !param.contentType) { + options.formData[param.name] = param.value + return + } + // attempt to read from disk! + if (param.fileName && !param.value) { + attachment.value = fs.createReadStream(param.fileName) + } else if (param.value) { + attachment.value = param.value + } - var doRead = state.needReadable; - debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some + if (param.fileName) { + attachment.options = { + filename: param.fileName, + contentType: param.contentType ? param.contentType : null + } + } - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. + options.formData[param.name] = attachment + }) + } else { + if (req.postData.text) { + options.body = req.postData.text + } + } + return options +} - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; // if the length is currently zero, then we *need* a readable event. +exports.t = Har - if (state.length === 0) state.needReadable = true; // call internal read method - this._read(state.highWaterMark); +/***/ }), - state.sync = false; // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. +/***/ 34473: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (!state.reading) n = howMuchToRead(nOrig, state); - } +"use strict"; - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark; - n = 0; - } else { - state.length -= n; - state.awaitDrain = 0; - } +var crypto = __nccwpck_require__(76417) - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. 
+function randomString (size) { + var bits = (size + 1) * 6 + var buffer = crypto.randomBytes(Math.ceil(bits / 8)) + var string = buffer.toString('base64').replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, '') + return string.slice(0, size) +} + +function calculatePayloadHash (payload, algorithm, contentType) { + var hash = crypto.createHash(algorithm) + hash.update('hawk.1.payload\n') + hash.update((contentType ? contentType.split(';')[0].trim().toLowerCase() : '') + '\n') + hash.update(payload || '') + hash.update('\n') + return hash.digest('base64') +} + +exports.calculateMac = function (credentials, opts) { + var normalized = 'hawk.1.header\n' + + opts.ts + '\n' + + opts.nonce + '\n' + + (opts.method || '').toUpperCase() + '\n' + + opts.resource + '\n' + + opts.host.toLowerCase() + '\n' + + opts.port + '\n' + + (opts.hash || '') + '\n' - if (nOrig !== n && state.ended) endReadable(this); + if (opts.ext) { + normalized = normalized + opts.ext.replace('\\', '\\\\').replace('\n', '\\n') } - if (ret !== null) this.emit('data', ret); - return ret; -}; + normalized = normalized + '\n' -function onEofChunk(stream, state) { - debug('onEofChunk'); - if (state.ended) return; + if (opts.app) { + normalized = normalized + opts.app + '\n' + (opts.dlg || '') + '\n' + } - if (state.decoder) { - var chunk = state.decoder.end(); + var hmac = crypto.createHmac(credentials.algorithm, credentials.key).update(normalized) + var digest = hmac.digest('base64') + return digest +} - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } +exports.header = function (uri, method, opts) { + var timestamp = opts.timestamp || Math.floor((Date.now() + (opts.localtimeOffsetMsec || 0)) / 1000) + var credentials = opts.credentials + if (!credentials || !credentials.id || !credentials.key || !credentials.algorithm) { + return '' } - state.ended = true; + if (['sha1', 'sha256'].indexOf(credentials.algorithm) === -1) { + return '' + } - if (state.sync) { - // if we are sync, wait until next tick to emit the data. - // Otherwise we risk emitting data in the flow() - // the readable code triggers during a read() call - emitReadable(stream); - } else { - // emit 'readable' now to make sure it gets picked up. - state.needReadable = false; + var artifacts = { + ts: timestamp, + nonce: opts.nonce || randomString(6), + method: method, + resource: uri.pathname + (uri.search || ''), + host: uri.hostname, + port: uri.port || (uri.protocol === 'http:' ? 80 : 443), + hash: opts.hash, + ext: opts.ext, + app: opts.app, + dlg: opts.dlg + } - if (!state.emittedReadable) { - state.emittedReadable = true; - emitReadable_(stream); - } + if (!artifacts.hash && (opts.payload || opts.payload === '')) { + artifacts.hash = calculatePayloadHash(opts.payload, credentials.algorithm, opts.contentType) } -} // Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. + var mac = exports.calculateMac(credentials, artifacts) -function emitReadable(stream) { - var state = stream._readableState; - debug('emitReadable', state.needReadable, state.emittedReadable); - state.needReadable = false; + var hasExt = artifacts.ext !== null && artifacts.ext !== undefined && artifacts.ext !== '' + var header = 'Hawk id="' + credentials.id + + '", ts="' + artifacts.ts + + '", nonce="' + artifacts.nonce + + (artifacts.hash ? 
'", hash="' + artifacts.hash : '') + + (hasExt ? '", ext="' + artifacts.ext.replace(/\\/g, '\\\\').replace(/"/g, '\\"') : '') + + '", mac="' + mac + '"' - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - process.nextTick(emitReadable_, stream); + if (artifacts.app) { + header = header + ', app="' + artifacts.app + (artifacts.dlg ? '", dlg="' + artifacts.dlg : '') + '"' } + + return header } -function emitReadable_(stream) { - var state = stream._readableState; - debug('emitReadable_', state.destroyed, state.length, state.ended); - if (!state.destroyed && (state.length || state.ended)) { - stream.emit('readable'); - state.emittedReadable = false; - } // The stream needs another readable event if - // 1. It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. +/***/ }), +/***/ 74845: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; - flow(stream); -} // at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. +"use strict"; -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - process.nextTick(maybeReadMore_, stream, state); - } -} +var jsonSafeStringify = __nccwpck_require__(57073) +var crypto = __nccwpck_require__(76417) +var Buffer = __nccwpck_require__(21867).Buffer -function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { - var len = state.length; - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) // didn't get any data, stop spinning. 
- break; - } +var defer = typeof setImmediate === 'undefined' + ? process.nextTick + : setImmediate - state.readingMore = false; -} // abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. +function paramsHaveRequestBody (params) { + return ( + params.body || + params.requestBodyStream || + (params.json && typeof params.json !== 'boolean') || + params.multipart + ) +} +function safeStringify (obj, replacer) { + var ret + try { + ret = JSON.stringify(obj, replacer) + } catch (e) { + ret = jsonSafeStringify(obj, replacer) + } + return ret +} -Readable.prototype._read = function (n) { - errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); -}; +function md5 (str) { + return crypto.createHash('md5').update(str).digest('hex') +} -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; +function isReadStream (rs) { + return rs.readable && rs.path && rs.mode +} - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; +function toBase64 (str) { + return Buffer.from(str || '', 'utf8').toString('base64') +} - case 1: - state.pipes = [state.pipes, dest]; - break; +function copy (obj) { + var o = {} + Object.keys(obj).forEach(function (i) { + o[i] = obj[i] + }) + return o +} - default: - state.pipes.push(dest); - break; +function version () { + var numbers = process.version.replace('v', '').split('.') + return { + major: parseInt(numbers[0], 10), + minor: parseInt(numbers[1], 10), + patch: parseInt(numbers[2], 10) } +} - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); - dest.on('unpipe', onunpipe); +exports.paramsHaveRequestBody = paramsHaveRequestBody +exports.safeStringify = safeStringify +exports.md5 = md5 +exports.isReadStream = isReadStream +exports.toBase64 = toBase64 +exports.copy = copy +exports.version = version +exports.defer = defer - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } +/***/ }), - function onend() { - debug('onend'); - dest.end(); - } // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. 
+/***/ 87810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - var cleanedUp = false; - function cleanup() { - debug('cleanup'); // cleanup event handlers once the pipe is broken +var uuid = __nccwpck_require__(71435) +var CombinedStream = __nccwpck_require__(85443) +var isstream = __nccwpck_require__(83362) +var Buffer = __nccwpck_require__(21867).Buffer - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - cleanedUp = true; // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. +function Multipart (request) { + this.request = request + this.boundary = uuid() + this.chunked = false + this.body = null +} - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); +Multipart.prototype.isChunked = function (options) { + var self = this + var chunked = false + var parts = options.data || options + + if (!parts.forEach) { + self.request.emit('error', new Error('Argument error, options.multipart.')) } - src.on('data', ondata); + if (options.chunked !== undefined) { + chunked = options.chunked + } - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - debug('dest.write', ret); + if (self.request.getHeader('transfer-encoding') === 'chunked') { + chunked = true + } - if (ret === false) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; + if (!chunked) { + parts.forEach(function (part) { + if (typeof part.body === 'undefined') { + self.request.emit('error', new Error('Body attribute missing in multipart.')) } + if (isstream(part.body)) { + chunked = true + } + }) + } - src.pause(); - } - } // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - - - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); - } // Make sure our error handler is attached before userland ones. - + return chunked +} - prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. 
+Multipart.prototype.setHeaders = function (chunked) { + var self = this - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); + if (chunked && !self.request.hasHeader('transfer-encoding')) { + self.request.setHeader('transfer-encoding', 'chunked') } - dest.once('close', onclose); + var header = self.request.getHeader('content-type') - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); + if (!header || header.indexOf('multipart') === -1) { + self.request.setHeader('content-type', 'multipart/related; boundary=' + self.boundary) + } else { + if (header.indexOf('boundary') !== -1) { + self.boundary = header.replace(/.*boundary=([^\s;]+).*/, '$1') + } else { + self.request.setHeader('content-type', header + '; boundary=' + self.boundary) + } } +} - dest.once('finish', onfinish); +Multipart.prototype.build = function (parts, chunked) { + var self = this + var body = chunked ? new CombinedStream() : [] - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } // tell the dest that it's being piped to + function add (part) { + if (typeof part === 'number') { + part = part.toString() + } + return chunked ? body.append(part) : body.push(Buffer.from(part)) + } + if (self.request.preambleCRLF) { + add('\r\n') + } - dest.emit('pipe', src); // start the flow if it hasn't been started already. + parts.forEach(function (part) { + var preamble = '--' + self.boundary + '\r\n' + Object.keys(part).forEach(function (key) { + if (key === 'body') { return } + preamble += key + ': ' + part[key] + '\r\n' + }) + preamble += '\r\n' + add(preamble) + add(part.body) + add('\r\n') + }) + add('--' + self.boundary + '--') - if (!state.flowing) { - debug('pipe resume'); - src.resume(); + if (self.request.postambleCRLF) { + add('\r\n') } - return dest; -}; + return body +} -function pipeOnDrain(src) { - return function pipeOnDrainFunctionResult() { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; +Multipart.prototype.onRequest = function (options) { + var self = this - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; + var chunked = self.isChunked(options) + var parts = options.data || options + + self.setHeaders(chunked) + self.chunked = chunked + self.body = self.build(parts, chunked) } -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { - hasUnpiped: false - }; // if we're not piping anywhere, then do nothing. +exports.$ = Multipart - if (state.pipesCount === 0) return this; // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - if (!dest) dest = state.pipes; // got a match. +/***/ }), - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; - } // slow case. multiple pipe destinations. +/***/ 41174: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +"use strict"; - if (!dest) { - // remove all. 
- var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, { - hasUnpiped: false - }); - } +var url = __nccwpck_require__(78835) +var qs = __nccwpck_require__(22760) +var caseless = __nccwpck_require__(35684) +var uuid = __nccwpck_require__(71435) +var oauth = __nccwpck_require__(43248) +var crypto = __nccwpck_require__(76417) +var Buffer = __nccwpck_require__(21867).Buffer - return this; - } // try to find the right one. +function OAuth (request) { + this.request = request + this.params = null +} +OAuth.prototype.buildParams = function (_oauth, uri, method, query, form, qsLib) { + var oa = {} + for (var i in _oauth) { + oa['oauth_' + i] = _oauth[i] + } + if (!oa.oauth_version) { + oa.oauth_version = '1.0' + } + if (!oa.oauth_timestamp) { + oa.oauth_timestamp = Math.floor(Date.now() / 1000).toString() + } + if (!oa.oauth_nonce) { + oa.oauth_nonce = uuid().replace(/-/g, '') + } + if (!oa.oauth_signature_method) { + oa.oauth_signature_method = 'HMAC-SHA1' + } - var index = indexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit('unpipe', this, unpipeInfo); - return this; -}; // set up data events if they are asked for -// Ensure readable listeners eventually get something + var consumer_secret_or_private_key = oa.oauth_consumer_secret || oa.oauth_private_key // eslint-disable-line camelcase + delete oa.oauth_consumer_secret + delete oa.oauth_private_key + var token_secret = oa.oauth_token_secret // eslint-disable-line camelcase + delete oa.oauth_token_secret -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - var state = this._readableState; + var realm = oa.oauth_realm + delete oa.oauth_realm + delete oa.oauth_transport_method - if (ev === 'data') { - // update readableListening so that resume() may be a no-op - // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused + var baseurl = uri.protocol + '//' + uri.host + uri.pathname + var params = qsLib.parse([].concat(query, form, qsLib.stringify(oa)).join('&')) - if (state.flowing !== false) this.resume(); - } else if (ev === 'readable') { - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.flowing = false; - state.emittedReadable = false; - debug('on readable', state.length, state.reading); + oa.oauth_signature = oauth.sign( + oa.oauth_signature_method, + method, + baseurl, + params, + consumer_secret_or_private_key, // eslint-disable-line camelcase + token_secret // eslint-disable-line camelcase + ) - if (state.length) { - emitReadable(this); - } else if (!state.reading) { - process.nextTick(nReadingNextTick, this); - } - } + if (realm) { + oa.realm = realm } - return res; -}; - -Readable.prototype.addListener = Readable.prototype.on; - -Readable.prototype.removeListener = function (ev, fn) { - var res = Stream.prototype.removeListener.call(this, ev, fn); + return oa +} - if (ev === 'readable') { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. 
This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); +OAuth.prototype.buildBodyHash = function (_oauth, body) { + if (['HMAC-SHA1', 'RSA-SHA1'].indexOf(_oauth.signature_method || 'HMAC-SHA1') < 0) { + this.request.emit('error', new Error('oauth: ' + _oauth.signature_method + + ' signature_method not supported with body_hash signing.')) } - return res; -}; - -Readable.prototype.removeAllListeners = function (ev) { - var res = Stream.prototype.removeAllListeners.apply(this, arguments); + var shasum = crypto.createHash('sha1') + shasum.update(body || '') + var sha1 = shasum.digest('hex') - if (ev === 'readable' || ev === undefined) { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } + return Buffer.from(sha1, 'hex').toString('base64') +} - return res; -}; +OAuth.prototype.concatParams = function (oa, sep, wrap) { + wrap = wrap || '' -function updateReadableListening(self) { - var state = self._readableState; - state.readableListening = self.listenerCount('readable') > 0; + var params = Object.keys(oa).filter(function (i) { + return i !== 'realm' && i !== 'oauth_signature' + }).sort() - if (state.resumeScheduled && !state.paused) { - // flowing needs to be set to true now, otherwise - // the upcoming resume will not flow. - state.flowing = true; // crude way to check if we should resume - } else if (self.listenerCount('data') > 0) { - self.resume(); + if (oa.realm) { + params.splice(0, 0, 'realm') } + params.push('oauth_signature') + + return params.map(function (i) { + return i + '=' + wrap + oauth.rfc3986(oa[i]) + wrap + }).join(sep) } -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} // pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. 
+OAuth.prototype.onRequest = function (_oauth) { + var self = this + self.params = _oauth + var uri = self.request.uri || {} + var method = self.request.method || '' + var headers = caseless(self.request.headers) + var body = self.request.body || '' + var qsLib = self.request.qsLib || qs -Readable.prototype.resume = function () { - var state = this._readableState; + var form + var query + var contentType = headers.get('content-type') || '' + var formContentType = 'application/x-www-form-urlencoded' + var transport = _oauth.transport_method || 'header' - if (!state.flowing) { - debug('resume'); // we flow only if there is no one listening - // for readable, but we still have to call - // resume() + if (contentType.slice(0, formContentType.length) === formContentType) { + contentType = formContentType + form = body + } + if (uri.query) { + query = uri.query + } + if (transport === 'body' && (method !== 'POST' || contentType !== formContentType)) { + self.request.emit('error', new Error('oauth: transport_method of body requires POST ' + + 'and content-type ' + formContentType)) + } - state.flowing = !state.readableListening; - resume(this, state); + if (!form && typeof _oauth.body_hash === 'boolean') { + _oauth.body_hash = self.buildBodyHash(_oauth, self.request.body.toString()) } - state.paused = false; - return this; -}; + var oa = self.buildParams(_oauth, uri, method, query, form, qsLib) -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - process.nextTick(resume_, stream, state); - } -} + switch (transport) { + case 'header': + self.request.setHeader('Authorization', 'OAuth ' + self.concatParams(oa, ',', '"')) + break -function resume_(stream, state) { - debug('resume', state.reading); + case 'query': + var href = self.request.uri.href += (query ? '&' : '?') + self.concatParams(oa, '&') + self.request.uri = url.parse(href) + self.request.path = self.request.uri.path + break - if (!state.reading) { - stream.read(0); - } + case 'body': + self.request.body = (form ? form + '&' : '') + self.concatParams(oa, '&') + break - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); + default: + self.request.emit('error', new Error('oauth: transport_method invalid')) + } } -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); +exports.f = OAuth - if (this._readableState.flowing !== false) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - this._readableState.paused = true; - return this; -}; +/***/ }), -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); +/***/ 66476: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - while (state.flowing && stream.read() !== null) { - ; - } -} // wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. 
+"use strict"; -Readable.prototype.wrap = function (stream) { - var _this = this; +var qs = __nccwpck_require__(22760) +var querystring = __nccwpck_require__(71191) - var state = this._readableState; - var paused = false; - stream.on('end', function () { - debug('wrapped end'); +function Querystring (request) { + this.request = request + this.lib = null + this.useQuerystring = null + this.parseOptions = null + this.stringifyOptions = null +} - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } +Querystring.prototype.init = function (options) { + if (this.lib) { return } - _this.push(null); - }); - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode + this.useQuerystring = options.useQuerystring + this.lib = (this.useQuerystring ? querystring : qs) - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + this.parseOptions = options.qsParseOptions || {} + this.stringifyOptions = options.qsStringifyOptions || {} +} - var ret = _this.push(chunk); +Querystring.prototype.stringify = function (obj) { + return (this.useQuerystring) + ? this.rfc3986(this.lib.stringify(obj, + this.stringifyOptions.sep || null, + this.stringifyOptions.eq || null, + this.stringifyOptions)) + : this.lib.stringify(obj, this.stringifyOptions) +} - if (!ret) { - paused = true; - stream.pause(); - } - }); // proxy all the other methods. - // important when wrapping filters and duplexes. +Querystring.prototype.parse = function (str) { + return (this.useQuerystring) + ? this.lib.parse(str, + this.parseOptions.sep || null, + this.parseOptions.eq || null, + this.parseOptions) + : this.lib.parse(str, this.parseOptions) +} - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function methodWrap(method) { - return function methodWrapReturnFunction() { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } // proxy certain important events. +Querystring.prototype.rfc3986 = function (str) { + return str.replace(/[!'()*]/g, function (c) { + return '%' + c.charCodeAt(0).toString(16).toUpperCase() + }) +} +Querystring.prototype.unescape = querystring.unescape - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } // when we try to consume some more bytes, simply unpause the - // underlying stream. 
+exports.h = Querystring - this._read = function (n) { - debug('wrapped _read', n); +/***/ }), - if (paused) { - paused = false; - stream.resume(); - } - }; +/***/ 3048: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return this; -}; +"use strict"; -if (typeof Symbol === 'function') { - Readable.prototype[Symbol.asyncIterator] = function () { - if (createReadableStreamAsyncIterator === undefined) { - createReadableStreamAsyncIterator = __nccwpck_require__(43306); - } - return createReadableStreamAsyncIterator(this); - }; +var url = __nccwpck_require__(78835) +var isUrl = /^https?:/ + +function Redirect (request) { + this.request = request + this.followRedirect = true + this.followRedirects = true + this.followAllRedirects = false + this.followOriginalHttpMethod = false + this.allowRedirect = function () { return true } + this.maxRedirects = 10 + this.redirects = [] + this.redirectsFollowed = 0 + this.removeRefererHeader = false } -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.highWaterMark; +Redirect.prototype.onRequest = function (options) { + var self = this + + if (options.maxRedirects !== undefined) { + self.maxRedirects = options.maxRedirects } -}); -Object.defineProperty(Readable.prototype, 'readableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState && this._readableState.buffer; + if (typeof options.followRedirect === 'function') { + self.allowRedirect = options.followRedirect } -}); -Object.defineProperty(Readable.prototype, 'readableFlowing', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.flowing; - }, - set: function set(state) { - if (this._readableState) { - this._readableState.flowing = state; - } + if (options.followRedirect !== undefined) { + self.followRedirects = !!options.followRedirect } -}); // exposed for testing purposes only. - -Readable._fromList = fromList; -Object.defineProperty(Readable.prototype, 'readableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.length; + if (options.followAllRedirects !== undefined) { + self.followAllRedirects = options.followAllRedirects } -}); // Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. 
- -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = state.buffer.consume(n, state.decoder); + if (self.followRedirects || self.followAllRedirects) { + self.redirects = self.redirects || [] } - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - debug('endReadable', state.endEmitted); - - if (!state.endEmitted) { - state.ended = true; - process.nextTick(endReadableNT, state, stream); + if (options.removeRefererHeader !== undefined) { + self.removeRefererHeader = options.removeRefererHeader + } + if (options.followOriginalHttpMethod !== undefined) { + self.followOriginalHttpMethod = options.followOriginalHttpMethod } } -function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. - - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); +Redirect.prototype.redirectTo = function (response) { + var self = this + var request = self.request - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the writable side is ready for autoDestroy as well - var wState = stream._writableState; + var redirectTo = null + if (response.statusCode >= 300 && response.statusCode < 400 && response.caseless.has('location')) { + var location = response.caseless.get('location') + request.debug('redirect', location) - if (!wState || wState.autoDestroy && wState.finished) { - stream.destroy(); + if (self.followAllRedirects) { + redirectTo = location + } else if (self.followRedirects) { + switch (request.method) { + case 'PATCH': + case 'PUT': + case 'POST': + case 'DELETE': + // Do not follow redirects + break + default: + redirectTo = location + break } } + } else if (response.statusCode === 401) { + var authHeader = request._auth.onResponse(response) + if (authHeader) { + request.setHeader('authorization', authHeader) + redirectTo = request.uri + } } + return redirectTo } -if (typeof Symbol === 'function') { - Readable.from = function (iterable, opts) { - if (from === undefined) { - from = __nccwpck_require__(39082); - } +Redirect.prototype.onResponse = function (response) { + var self = this + var request = self.request - return from(Readable, iterable, opts); - }; -} + var redirectTo = self.redirectTo(response) + if (!redirectTo || !self.allowRedirect.call(request, response)) { + return false + } -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; + request.debug('redirect to', redirectTo) + + // ignore any potential response body. it cannot possibly be useful + // to us at this point. + // response.resume should be defined, but check anyway before calling. Workaround for browserify. + if (response.resume) { + response.resume() } - return -1; -} + if (self.redirectsFollowed >= self.maxRedirects) { + request.emit('error', new Error('Exceeded maxRedirects. 
Probably stuck in a redirect loop ' + request.uri.href)) + return false + } + self.redirectsFollowed += 1 -/***/ }), + if (!isUrl.test(redirectTo)) { + redirectTo = url.resolve(request.uri.href, redirectTo) + } -/***/ 34415: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var uriPrev = request.uri + request.uri = url.parse(redirectTo) -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. 
In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. + // handle the case where we change protocol from https to http or vice versa + if (request.uri.protocol !== uriPrev.protocol) { + delete request.agent + } + + self.redirects.push({ statusCode: response.statusCode, redirectUri: redirectTo }) + + if (self.followAllRedirects && request.method !== 'HEAD' && + response.statusCode !== 401 && response.statusCode !== 307) { + request.method = self.followOriginalHttpMethod ? request.method : 'GET' + } + // request.method = 'GET' // Force all redirects to use GET || commented out fixes #215 + delete request.src + delete request.req + delete request._started + if (response.statusCode !== 401 && response.statusCode !== 307) { + // Remove parameters from the previous response, unless this is the second request + // for a server that requires digest authentication. + delete request.body + delete request._form + if (request.headers) { + request.removeHeader('host') + request.removeHeader('content-type') + request.removeHeader('content-length') + if (request.uri.hostname !== request.originalHost.split(':')[0]) { + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of curl: + // https://github.com/bagder/curl/blob/6beb0eee/lib/http.c#L710 + request.removeHeader('authorization') + } + } + } + if (!self.removeRefererHeader) { + request.setHeader('referer', uriPrev.href) + } -module.exports = Transform; + request.emit('redirect') -var _require$codes = __nccwpck_require__(67214)/* .codes */ .q, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, - ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; + request.init() -var Duplex = __nccwpck_require__(41359); + return true +} -__nccwpck_require__(44124)(Transform, Duplex); +exports.l = Redirect -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; - var cb = ts.writecb; - if (cb === null) { - return this.emit('error', new ERR_MULTIPLE_CALLBACK()); - } +/***/ }), - ts.writechunk = null; - ts.writecb = null; - if (data != null) // single equals check for both `null` and `undefined` - this.push(data); - cb(er); - var rs = this._readableState; - rs.reading = false; +/***/ 17619: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); - } -} +"use strict"; -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - Duplex.call(this, options); - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; // start out asking for a readable event once data is transformed. 
- this._readableState.needReadable = true; // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. +var url = __nccwpck_require__(78835) +var tunnel = __nccwpck_require__(11137) - this._readableState.sync = false; +var defaultProxyHeaderWhiteList = [ + 'accept', + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept-ranges', + 'cache-control', + 'content-encoding', + 'content-language', + 'content-location', + 'content-md5', + 'content-range', + 'content-type', + 'connection', + 'date', + 'expect', + 'max-forwards', + 'pragma', + 'referer', + 'te', + 'user-agent', + 'via' +] - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - if (typeof options.flush === 'function') this._flush = options.flush; - } // When the writable side finishes, then flush out anything remaining. +var defaultProxyHeaderExclusiveList = [ + 'proxy-authorization' +] + +function constructProxyHost (uriObject) { + var port = uriObject.port + var protocol = uriObject.protocol + var proxyHost = uriObject.hostname + ':' + if (port) { + proxyHost += port + } else if (protocol === 'https:') { + proxyHost += '443' + } else { + proxyHost += '80' + } - this.on('prefinish', prefinish); + return proxyHost } -function prefinish() { - var _this = this; +function constructProxyHeaderWhiteList (headers, proxyHeaderWhiteList) { + var whiteList = proxyHeaderWhiteList + .reduce(function (set, header) { + set[header.toLowerCase()] = true + return set + }, {}) - if (typeof this._flush === 'function' && !this._readableState.destroyed) { - this._flush(function (er, data) { - done(_this, er, data); - }); - } else { - done(this, null, null); + return Object.keys(headers) + .filter(function (header) { + return whiteList[header.toLowerCase()] + }) + .reduce(function (set, header) { + set[header] = headers[header] + return set + }, {}) +} + +function constructTunnelOptions (request, proxyHeaders) { + var proxy = request.proxy + + var tunnelOptions = { + proxy: { + host: proxy.hostname, + port: +proxy.port, + proxyAuth: proxy.auth, + headers: proxyHeaders + }, + headers: request.headers, + ca: request.ca, + cert: request.cert, + key: request.key, + passphrase: request.passphrase, + pfx: request.pfx, + ciphers: request.ciphers, + rejectUnauthorized: request.rejectUnauthorized, + secureOptions: request.secureOptions, + secureProtocol: request.secureProtocol } + + return tunnelOptions } -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; // This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. +function constructTunnelFnName (uri, proxy) { + var uriProtocol = (uri.protocol === 'https:' ? 'https' : 'http') + var proxyProtocol = (proxy.protocol === 'https:' ? 
'Https' : 'Http') + return [uriProtocol, proxyProtocol].join('Over') +} +function getTunnelFn (request) { + var uri = request.uri + var proxy = request.proxy + var tunnelFnName = constructTunnelFnName(uri, proxy) + return tunnel[tunnelFnName] +} -Transform.prototype._transform = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); -}; +function Tunnel (request) { + this.request = request + this.proxyHeaderWhiteList = defaultProxyHeaderWhiteList + this.proxyHeaderExclusiveList = [] + if (typeof request.tunnel !== 'undefined') { + this.tunnelOverride = request.tunnel + } +} -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; +Tunnel.prototype.isEnabled = function () { + var self = this + var request = self.request + // Tunnel HTTPS by default. Allow the user to override this setting. - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + // If self.tunnelOverride is set (the user specified a value), use it. + if (typeof self.tunnelOverride !== 'undefined') { + return self.tunnelOverride } -}; // Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. + // If the destination is HTTPS, tunnel. + if (request.uri.protocol === 'https:') { + return true + } -Transform.prototype._read = function (n) { - var ts = this._transformState; + // Otherwise, do not use tunnel. + return false +} - if (ts.writechunk !== null && !ts.transforming) { - ts.transforming = true; +Tunnel.prototype.setup = function (options) { + var self = this + var request = self.request - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. 
- ts.needTransform = true; + options = options || {} + + if (typeof request.proxy === 'string') { + request.proxy = url.parse(request.proxy) } -}; -Transform.prototype._destroy = function (err, cb) { - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - }); -}; + if (!request.proxy || !request.tunnel) { + return false + } -function done(stream, er, data) { - if (er) return stream.emit('error', er); - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); // TODO(BridgeAR): Write a test for these two error cases - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided + // Setup Proxy Header Exclusive List and White List + if (options.proxyHeaderWhiteList) { + self.proxyHeaderWhiteList = options.proxyHeaderWhiteList + } + if (options.proxyHeaderExclusiveList) { + self.proxyHeaderExclusiveList = options.proxyHeaderExclusiveList + } - if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); - if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); - return stream.push(null); -} + var proxyHeaderExclusiveList = self.proxyHeaderExclusiveList.concat(defaultProxyHeaderExclusiveList) + var proxyHeaderWhiteList = self.proxyHeaderWhiteList.concat(proxyHeaderExclusiveList) -/***/ }), + // Setup Proxy Headers and Proxy Headers Host + // Only send the Proxy White Listed Header names + var proxyHeaders = constructProxyHeaderWhiteList(request.headers, proxyHeaderWhiteList) + proxyHeaders.host = constructProxyHost(request.uri) -/***/ 26993: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + proxyHeaderExclusiveList.forEach(request.removeHeader, request) -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. 
+ // Set Agent from Tunnel Data + var tunnelFn = getTunnelFn(request) + var tunnelOptions = constructTunnelOptions(request, proxyHeaders) + request.agent = tunnelFn(tunnelOptions) + return true +} -module.exports = Writable; -/* */ +Tunnel.defaultProxyHeaderWhiteList = defaultProxyHeaderWhiteList +Tunnel.defaultProxyHeaderExclusiveList = defaultProxyHeaderExclusiveList +exports.n = Tunnel -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} // It seems a linked list but it is not -// there will be only 2 of these for each stream +/***/ }), -function CorkedRequest(state) { - var _this = this; +/***/ 47279: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this.next = null; - this.entry = null; +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ - this.finish = function () { - onCorkedFinish(_this, state); - }; +var net = __nccwpck_require__(11631); +var urlParse = __nccwpck_require__(78835).parse; +var util = __nccwpck_require__(31669); +var pubsuffix = __nccwpck_require__(34964); +var Store = __nccwpck_require__(11013)/* .Store */ .y; +var MemoryCookieStore = __nccwpck_require__(73533)/* .MemoryCookieStore */ .m; +var pathMatch = __nccwpck_require__(30495)/* .pathMatch */ .U; +var VERSION = __nccwpck_require__(30380); + +var punycode; +try { + punycode = __nccwpck_require__(94213); +} catch(e) { + console.warn("tough-cookie: can't load punycode; won't use punycode for domain normalization"); } -/* */ -/**/ +// From RFC6265 S4.1.1 +// note that it excludes \x3B ";" +var COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/; +var CONTROL_CHARS = /[\x00-\x1F]/; -var Duplex; -/**/ +// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in +// the "relaxed" mode, see: +// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60 +var TERMINATORS = ['\n', '\r', '\0']; -Writable.WritableState = WritableState; -/**/ +// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' +// Note ';' is \x3B +var PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; -var internalUtil = { - deprecate: __nccwpck_require__(65278) -}; -/**/ +// date-time parsing constants (RFC6265 S5.1.1) -/**/ +var DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; -var Stream = __nccwpck_require__(62387); -/**/ +var MONTH_TO_NUM = { + jan:0, feb:1, mar:2, apr:3, may:4, jun:5, + jul:6, aug:7, sep:8, oct:9, nov:10, dec:11 +}; +var NUM_TO_MONTH = [ + 'Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec' +]; +var NUM_TO_DAY = [ + 'Sun','Mon','Tue','Wed','Thu','Fri','Sat' +]; +var MAX_TIME = 2147483647000; // 31-bit max +var MIN_TIME = 0; // 31-bit min -var Buffer = __nccwpck_require__(64293).Buffer; +/* + * Parses a Natural number (i.e., non-negative integer) with either the + * *DIGIT ( non-digit *OCTET ) + * or + * *DIGIT + * grammar (RFC6265 S5.1.1). + * + * The "trailingOK" boolean controls if the grammar accepts a + * "( non-digit *OCTET )" trailer. + */ +function parseDigits(token, minDigits, maxDigits, trailingOK) { + var count = 0; + while (count < token.length) { + var c = token.charCodeAt(count); + // "non-digit = %x00-2F / %x3A-FF" + if (c <= 0x2F || c >= 0x3A) { + break; + } + count++; + } -var OurUint8Array = global.Uint8Array || function () {}; + // constrain to a minimum and maximum number of digits. 
+ if (count < minDigits || count > maxDigits) { + return null; + } -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} + if (!trailingOK && count != token.length) { + return null; + } -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; + return parseInt(token.substr(0,count), 10); } -var destroyImpl = __nccwpck_require__(97049); +function parseTime(token) { + var parts = token.split(':'); + var result = [0,0,0]; -var _require = __nccwpck_require__(39948), - getHighWaterMark = _require.getHighWaterMark; + /* RF6256 S5.1.1: + * time = hms-time ( non-digit *OCTET ) + * hms-time = time-field ":" time-field ":" time-field + * time-field = 1*2DIGIT + */ -var _require$codes = __nccwpck_require__(67214)/* .codes */ .q, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, - ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; + if (parts.length !== 3) { + return null; + } -var errorOrDestroy = destroyImpl.errorOrDestroy; + for (var i = 0; i < 3; i++) { + // "time-field" must be strictly "1*2DIGIT", HOWEVER, "hms-time" can be + // followed by "( non-digit *OCTET )" so therefore the last time-field can + // have a trailer + var trailingOK = (i == 2); + var num = parseDigits(parts[i], 1, 2, trailingOK); + if (num === null) { + return null; + } + result[i] = num; + } -__nccwpck_require__(44124)(Writable, Stream); + return result; +} -function nop() {} +function parseMonth(token) { + token = String(token).substr(0,3).toLowerCase(); + var num = MONTH_TO_NUM[token]; + return num >= 0 ? num : null; +} -function WritableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(41359); - options = options || {}; // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream, - // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. +/* + * RFC6265 S5.1.1 date parser (see RFC for full grammar) + */ +function parseDate(str) { + if (!str) { + return; + } - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream - // contains buffers or objects. + /* RFC6265 S5.1.1: + * 2. 
Process each date-token sequentially in the order the date-tokens + * appear in the cookie-date + */ + var tokens = str.split(DATE_DELIM); + if (!tokens) { + return; + } - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() + var hour = null; + var minute = null; + var second = null; + var dayOfMonth = null; + var month = null; + var year = null; - this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called + for (var i=0; i= 70 && year <= 99) { + year += 1900; + } else if (year >= 0 && year <= 69) { + year += 2000; + } + } + } + } - this.destroyed = false; // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. + /* RFC 6265 S5.1.1 + * "5. Abort these steps and fail to parse the cookie-date if: + * * at least one of the found-day-of-month, found-month, found- + * year, or found-time flags is not set, + * * the day-of-month-value is less than 1 or greater than 31, + * * the year-value is less than 1601, + * * the hour-value is greater than 23, + * * the minute-value is greater than 59, or + * * the second-value is greater than 59. + * (Note that leap seconds cannot be represented in this syntax.)" + * + * So, in order as above: + */ + if ( + dayOfMonth === null || month === null || year === null || second === null || + dayOfMonth < 1 || dayOfMonth > 31 || + year < 1601 || + hour > 23 || + minute > 59 || + second > 59 + ) { + return; + } - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. + return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second)); +} - this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. +function formatDate(date) { + var d = date.getUTCDate(); d = d >= 10 ? d : '0'+d; + var h = date.getUTCHours(); h = h >= 10 ? h : '0'+h; + var m = date.getUTCMinutes(); m = m >= 10 ? m : '0'+m; + var s = date.getUTCSeconds(); s = s >= 10 ? s : '0'+s; + return NUM_TO_DAY[date.getUTCDay()] + ', ' + + d+' '+ NUM_TO_MONTH[date.getUTCMonth()] +' '+ date.getUTCFullYear() +' '+ + h+':'+m+':'+s+' GMT'; +} - this.length = 0; // a flag to see when we're in the middle of a write. +// S5.1.2 Canonicalized Host Names +function canonicalDomain(str) { + if (str == null) { + return null; + } + str = str.trim().replace(/^\./,''); // S4.1.2.3 & S5.2.3: ignore leading . - this.writing = false; // when true all writes will be buffered until .uncork() call + // convert to IDN if any non-ASCII characters + if (punycode && /[^\u0001-\u007f]/.test(str)) { + str = punycode.toASCII(str); + } - this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. 
+ return str.toLowerCase(); +} - this.sync = true; // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. +// S5.1.3 Domain Matching +function domainMatch(str, domStr, canonicalize) { + if (str == null || domStr == null) { + return null; + } + if (canonicalize !== false) { + str = canonicalDomain(str); + domStr = canonicalDomain(domStr); + } - this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) + /* + * "The domain string and the string are identical. (Note that both the + * domain string and the string will have been canonicalized to lower case at + * this point)" + */ + if (str == domStr) { + return true; + } - this.onwrite = function (er) { - onwrite(stream, er); - }; // the callback that the user supplies to write(chunk,encoding,cb) + /* "All of the following [three] conditions hold:" (order adjusted from the RFC) */ + /* "* The string is a host name (i.e., not an IP address)." */ + if (net.isIP(str)) { + return false; + } - this.writecb = null; // the amount that is being written when _write is called. + /* "* The domain string is a suffix of the string" */ + var idx = str.indexOf(domStr); + if (idx <= 0) { + return false; // it's a non-match (-1) or prefix (0) + } - this.writelen = 0; - this.bufferedRequest = null; - this.lastBufferedRequest = null; // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted + // e.g "a.b.c".indexOf("b.c") === 2 + // 5 === 3+2 + if (str.length !== domStr.length + idx) { // it's not a suffix + return false; + } - this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams + /* "* The last character of the string that is not included in the domain + * string is a %x2E (".") character." */ + if (str.substr(idx-1,1) !== '.') { + return false; + } - this.prefinished = false; // True if the error was already emitted and should not be thrown again + return true; +} - this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. - this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') +// RFC6265 S5.1.4 Paths and Path-Match - this.autoDestroy = !!options.autoDestroy; // count buffered requests +/* + * "The user agent MUST use an algorithm equivalent to the following algorithm + * to compute the default-path of a cookie:" + * + * Assumption: the path (and not query part or absolute uri) is passed in. + */ +function defaultPath(path) { + // "2. If the uri-path is empty or if the first character of the uri-path is not + // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. + if (!path || path.substr(0,1) !== "/") { + return "/"; + } - this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two + // "3. If the uri-path contains no more than one %x2F ("/") character, output + // %x2F ("/") and skip the remaining step." + if (path === "/") { + return path; + } - this.corkedRequestsFree = new CorkedRequest(this); -} + var rightSlash = path.lastIndexOf("/"); + if (rightSlash === 0) { + return "/"; + } -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; + // "4. 
Output the characters of the uri-path from the first character up to, + // but not including, the right-most %x2F ("/")." + return path.slice(0, rightSlash); +} - while (current) { - out.push(current); - current = current.next; +function trimTerminator(str) { + for (var t = 0; t < TERMINATORS.length; t++) { + var terminatorIdx = str.indexOf(TERMINATORS[t]); + if (terminatorIdx !== -1) { + str = str.substr(0,terminatorIdx); + } } - return out; -}; + return str; +} -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function writableStateBufferGetter() { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); // Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. +function parseCookiePair(cookiePair, looseMode) { + cookiePair = trimTerminator(cookiePair); + + var firstEq = cookiePair.indexOf('='); + if (looseMode) { + if (firstEq === 0) { // '=' is immediately at start + cookiePair = cookiePair.substr(1); + firstEq = cookiePair.indexOf('='); // might still need to split on '=' + } + } else { // non-loose mode + if (firstEq <= 0) { // no '=' or is at start + return; // needs to have non-empty "cookie-name" + } + } + var cookieName, cookieValue; + if (firstEq <= 0) { + cookieName = ""; + cookieValue = cookiePair.trim(); + } else { + cookieName = cookiePair.substr(0, firstEq).trim(); + cookieValue = cookiePair.substr(firstEq+1).trim(); + } -var realHasInstance; + if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) { + return; + } -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function value(object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; - } - }); -} else { - realHasInstance = function realHasInstance(object) { - return object instanceof this; - }; + var c = new Cookie(); + c.key = cookieName; + c.value = cookieValue; + return c; } -function Writable(options) { - Duplex = Duplex || __nccwpck_require__(41359); // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - // Checking for a Stream.Duplex instance is faster here instead of inside - // the WritableState constructor, at least with V8 6.5 +function parse(str, options) { + if (!options || typeof options !== 'object') { + options = {}; + } + str = str.trim(); - var isDuplex = this instanceof Duplex; - if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); - this._writableState = new WritableState(options, this, isDuplex); // legacy. + // We use a regex to parse the "name-value-pair" part of S5.2 + var firstSemi = str.indexOf(';'); // S5.2 step 1 + var cookiePair = (firstSemi === -1) ? 
str : str.substr(0, firstSemi); + var c = parseCookiePair(cookiePair, !!options.loose); + if (!c) { + return; + } - this.writable = true; + if (firstSemi === -1) { + return c; + } - if (options) { - if (typeof options.write === 'function') this._write = options.write; - if (typeof options.writev === 'function') this._writev = options.writev; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - if (typeof options.final === 'function') this._final = options.final; + // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question)." plus later on in the same section + // "discard the first ";" and trim". + var unparsed = str.slice(firstSemi + 1).trim(); + + // "If the unparsed-attributes string is empty, skip the rest of these + // steps." + if (unparsed.length === 0) { + return c; } - Stream.call(this); -} // Otherwise people can pipe Writable streams, which is just wrong. + /* + * S5.2 says that when looping over the items "[p]rocess the attribute-name + * and attribute-value according to the requirements in the following + * subsections" for every item. Plus, for many of the individual attributes + * in S5.3 it says to use the "attribute-value of the last attribute in the + * cookie-attribute-list". Therefore, in this implementation, we overwrite + * the previous value. + */ + var cookie_avs = unparsed.split(';'); + while (cookie_avs.length) { + var av = cookie_avs.shift().trim(); + if (av.length === 0) { // happens if ";;" appears + continue; + } + var av_sep = av.indexOf('='); + var av_key, av_value; + + if (av_sep === -1) { + av_key = av; + av_value = null; + } else { + av_key = av.substr(0,av_sep); + av_value = av.substr(av_sep+1); + } + av_key = av_key.trim().toLowerCase(); -Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); -}; + if (av_value) { + av_value = av_value.trim(); + } -function writeAfterEnd(stream, cb) { - var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb + switch(av_key) { + case 'expires': // S5.2.1 + if (av_value) { + var exp = parseDate(av_value); + // "If the attribute-value failed to parse as a cookie date, ignore the + // cookie-av." + if (exp) { + // over and underflow not realistically a concern: V8's getTime() seems to + // store something larger than a 32-bit time_t (even with 32-bit node) + c.expires = exp; + } + } + break; - errorOrDestroy(stream, er); - process.nextTick(cb, er); -} // Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. + case 'max-age': // S5.2.2 + if (av_value) { + // "If the first character of the attribute-value is not a DIGIT or a "-" + // character ...[or]... If the remainder of attribute-value contains a + // non-DIGIT character, ignore the cookie-av." + if (/^-?[0-9]+$/.test(av_value)) { + var delta = parseInt(av_value, 10); + // "If delta-seconds is less than or equal to zero (0), let expiry-time + // be the earliest representable date and time." + c.setMaxAge(delta); + } + } + break; + case 'domain': // S5.2.3 + // "If the attribute-value is empty, the behavior is undefined. However, + // the user agent SHOULD ignore the cookie-av entirely." + if (av_value) { + // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E + // (".") character." 
+ var domain = av_value.trim().replace(/^\./, ''); + if (domain) { + // "Convert the cookie-domain to lower case." + c.domain = domain.toLowerCase(); + } + } + break; -function validChunk(stream, state, chunk, cb) { - var er; + case 'path': // S5.2.4 + /* + * "If the attribute-value is empty or if the first character of the + * attribute-value is not %x2F ("/"): + * Let cookie-path be the default-path. + * Otherwise: + * Let cookie-path be the attribute-value." + * + * We'll represent the default-path as null since it depends on the + * context of the parsing. + */ + c.path = av_value && av_value[0] === "/" ? av_value : null; + break; - if (chunk === null) { - er = new ERR_STREAM_NULL_VALUES(); - } else if (typeof chunk !== 'string' && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); - } + case 'secure': // S5.2.5 + /* + * "If the attribute-name case-insensitively matches the string "Secure", + * the user agent MUST append an attribute to the cookie-attribute-list + * with an attribute-name of Secure and an empty attribute-value." + */ + c.secure = true; + break; - if (er) { - errorOrDestroy(stream, er); - process.nextTick(cb, er); - return false; + case 'httponly': // S5.2.6 -- effectively the same as 'secure' + c.httpOnly = true; + break; + + default: + c.extensions = c.extensions || []; + c.extensions.push(av); + break; + } } - return true; + return c; } -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - var isBuf = !state.objectMode && _isUint8Array(chunk); - - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); +// avoid the V8 deoptimization monster! +function jsonParse(str) { + var obj; + try { + obj = JSON.parse(str); + } catch (e) { + return e; } + return obj; +} - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; +function fromJSON(str) { + if (!str) { + return null; } - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - if (typeof cb !== 'function') cb = nop; - if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + var obj; + if (typeof str === 'string') { + obj = jsonParse(str); + if (obj instanceof Error) { + return null; + } + } else { + // assume it's an Object + obj = str; } - return ret; -}; -Writable.prototype.cork = function () { - this._writableState.corked++; -}; + var c = new Cookie(); + for (var i=0; i -1)) throw new ERR_UNKNOWN_ENCODING(encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; +function cookieCompare(a,b) { + var cmp = 0; -Object.defineProperty(Writable.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); + // descending for length: b CMP a + var aPathLen = a.path ? a.path.length : 0; + var bPathLen = b.path ? b.path.length : 0; + cmp = bPathLen - aPathLen; + if (cmp !== 0) { + return cmp; } -}); -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); + // ascending for time: a CMP b + var aTime = a.creation ? a.creation.getTime() : MAX_TIME; + var bTime = b.creation ? 
b.creation.getTime() : MAX_TIME; + cmp = aTime - bTime; + if (cmp !== 0) { + return cmp; } - return chunk; + // break ties for the same millisecond (precision of JavaScript's clock) + cmp = a.creationIndex - b.creationIndex; + + return cmp; } -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; +// Gives the permutation of all possible pathMatch()es of a given path. The +// array is in longest-to-shortest order. Handy for indexing. +function permutePath(path) { + if (path === '/') { + return ['/']; } -}); // if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. - -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; + if (path.lastIndexOf('/') === path.length-1) { + path = path.substr(0,path.length-1); + } + var permutations = [path]; + while (path.length > 1) { + var lindex = path.lastIndexOf('/'); + if (lindex === 0) { + break; } + path = path.substr(0,lindex); + permutations.push(path); } + permutations.push('/'); + return permutations; +} - var len = state.objectMode ? 1 : chunk.length; - state.length += len; - var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. +function getCookieContext(url) { + if (url instanceof Object) { + return url; + } + // NOTE: decodeURI will throw on malformed URIs (see GH-32). + // Therefore, we will just skip decoding for such URIs. 
+ try { + url = decodeURI(url); + } + catch(err) { + // Silently swallow error + } - if (!ret) state.needDrain = true; + return urlParse(url); +} - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; +function Cookie(options) { + options = options || {}; - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; + Object.keys(options).forEach(function(prop) { + if (Cookie.prototype.hasOwnProperty(prop) && + Cookie.prototype[prop] !== options[prop] && + prop.substr(0,1) !== '_') + { + this[prop] = options[prop]; } + }, this); - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } + this.creation = this.creation || new Date(); - return ret; + // used to break creation ties in cookieCompare(): + Object.defineProperty(this, 'creationIndex', { + configurable: false, + enumerable: false, // important for assert.deepEqual checks + writable: true, + value: ++Cookie.cookiesCreated + }); } -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} +Cookie.cookiesCreated = 0; // incremented each time a cookie is created -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; +Cookie.parse = parse; +Cookie.fromJSON = fromJSON; - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - process.nextTick(cb, er); // this can emit finish, and it will always happen - // after error +Cookie.prototype.key = ""; +Cookie.prototype.value = ""; - process.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); // this can emit finish, but finish must - // always follow error +// the order in which the RFC has them: +Cookie.prototype.expires = "Infinity"; // coerces to literal Infinity +Cookie.prototype.maxAge = null; // takes precedence over expires for TTL +Cookie.prototype.domain = null; +Cookie.prototype.path = null; +Cookie.prototype.secure = false; +Cookie.prototype.httpOnly = false; +Cookie.prototype.extensions = null; - finishMaybe(stream, state); - } -} +// set by the CookieJar: +Cookie.prototype.hostOnly = null; // boolean when set +Cookie.prototype.pathIsDefault = null; // boolean when set +Cookie.prototype.creation = null; // Date when set; defaulted by Cookie.parse +Cookie.prototype.lastAccessed = null; // Date when set +Object.defineProperty(Cookie.prototype, 'creationIndex', { + configurable: true, + enumerable: false, + writable: true, + value: 0 +}); -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; +Cookie.serializableProperties = Object.keys(Cookie.prototype) + .filter(function(prop) { + return !( + Cookie.prototype[prop] instanceof Function || + prop === 'creationIndex' || + prop.substr(0,1) === '_' + ); + }); + +Cookie.prototype.inspect = function inspect() 
{ + var now = Date.now(); + return 'Cookie="'+this.toString() + + '; hostOnly='+(this.hostOnly != null ? this.hostOnly : '?') + + '; aAge='+(this.lastAccessed ? (now-this.lastAccessed.getTime())+'ms' : '?') + + '; cAge='+(this.creation ? (now-this.creation.getTime())+'ms' : '?') + + '"'; +}; + +// Use the new custom inspection symbol to add the custom inspect function if +// available. +if (util.inspect.custom) { + Cookie.prototype[util.inspect.custom] = Cookie.prototype.inspect; } -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); - onwriteStateUpdate(state); - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state) || stream.destroyed; +Cookie.prototype.toJSON = function() { + var obj = {}; - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); + var props = Cookie.serializableProperties; + for (var i=0; i { + if (cookie.hostOnly == null) { // don't reset if already set + cookie.hostOnly = false; + } -"use strict"; + } else { + cookie.hostOnly = true; + cookie.domain = host; + } + //S5.2.4 If the attribute-value is empty or if the first character of the + //attribute-value is not %x2F ("/"): + //Let cookie-path be the default-path. + if (!cookie.path || cookie.path[0] !== '/') { + cookie.path = defaultPath(context.pathname); + cookie.pathIsDefault = true; + } -var _Object$setPrototypeO; + // S5.3 step 8: NOOP; secure attribute + // S5.3 step 9: NOOP; httpOnly attribute -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + // S5.3 step 10 + if (options.http === false && cookie.httpOnly) { + err = new Error("Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } -var finished = __nccwpck_require__(76080); + var store = this.store; -var kLastResolve = Symbol('lastResolve'); -var kLastReject = Symbol('lastReject'); -var kError = Symbol('error'); -var kEnded = Symbol('ended'); -var kLastPromise = Symbol('lastPromise'); -var kHandlePromise = Symbol('handlePromise'); -var kStream = Symbol('stream'); + if (!store.updateCookie) { + store.updateCookie = function(oldCookie, newCookie, cb) { + this.putCookie(newCookie, cb); + }; + } -function createIterResult(value, done) { - return { - value: value, - done: done - }; -} + function withCookie(err, oldCookie) { + if (err) { + return cb(err); + } -function readAndResolve(iter) { - var resolve = iter[kLastResolve]; + var next = function(err) { + if (err) { + return cb(err); + } else { + cb(null, cookie); + } + }; - if (resolve !== null) { - var data = iter[kStream].read(); // we defer if data is null - // we can be expecting either 'end' or - // 'error' + if (oldCookie) { + // S5.3 step 11 - "If the cookie store contains a cookie with the same name, + // domain, and path as the newly created cookie:" + if (options.http === false && oldCookie.httpOnly) { // step 11.2 + err = new Error("old Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? 
null : err); + } + cookie.creation = oldCookie.creation; // step 11.3 + cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker + cookie.lastAccessed = now; + // Step 11.4 (delete cookie) is implied by just setting the new one: + store.updateCookie(oldCookie, cookie, next); // step 12 - if (data !== null) { - iter[kLastPromise] = null; - iter[kLastResolve] = null; - iter[kLastReject] = null; - resolve(createIterResult(data, false)); + } else { + cookie.creation = cookie.lastAccessed = now; + store.putCookie(cookie, next); // step 12 } } -} -function onReadable(iter) { - // we wait for the next tick, because it might - // emit an error with process.nextTick - process.nextTick(readAndResolve, iter); -} + store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie); +}; -function wrapForNext(lastPromise, iter) { - return function (resolve, reject) { - lastPromise.then(function () { - if (iter[kEnded]) { - resolve(createIterResult(undefined, true)); - return; - } +// RFC6365 S5.4 +CAN_BE_SYNC.push('getCookies'); +CookieJar.prototype.getCookies = function(url, options, cb) { + var context = getCookieContext(url); + if (options instanceof Function) { + cb = options; + options = {}; + } - iter[kHandlePromise](resolve, reject); - }, reject); - }; -} + var host = canonicalDomain(context.hostname); + var path = context.pathname || '/'; -var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); -var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { - get stream() { - return this[kStream]; - }, + var secure = options.secure; + if (secure == null && context.protocol && + (context.protocol == 'https:' || context.protocol == 'wss:')) + { + secure = true; + } - next: function next() { - var _this = this; + var http = options.http; + if (http == null) { + http = true; + } - // if we have detected an error in the meanwhile - // reject straight away - var error = this[kError]; + var now = options.now || Date.now(); + var expireCheck = options.expire !== false; + var allPaths = !!options.allPaths; + var store = this.store; - if (error !== null) { - return Promise.reject(error); + function matchingCookie(c) { + // "Either: + // The cookie's host-only-flag is true and the canonicalized + // request-host is identical to the cookie's domain. + // Or: + // The cookie's host-only-flag is false and the canonicalized + // request-host domain-matches the cookie's domain." + if (c.hostOnly) { + if (c.domain != host) { + return false; + } + } else { + if (!domainMatch(host, c.domain, false)) { + return false; + } } - if (this[kEnded]) { - return Promise.resolve(createIterResult(undefined, true)); + // "The request-uri's path path-matches the cookie's path." + if (!allPaths && !pathMatch(path, c.path)) { + return false; } - if (this[kStream].destroyed) { - // We need to defer via nextTick because if .destroy(err) is - // called, the error will be emitted via nextTick, and - // we cannot guarantee that there is no error lingering around - // waiting to be emitted. 
- return new Promise(function (resolve, reject) { - process.nextTick(function () { - if (_this[kError]) { - reject(_this[kError]); - } else { - resolve(createIterResult(undefined, true)); - } - }); - }); - } // if we have multiple next() calls - // we will wait for the previous Promise to finish - // this logic is optimized to support for await loops, - // where next() is only called once at a time - - - var lastPromise = this[kLastPromise]; - var promise; - - if (lastPromise) { - promise = new Promise(wrapForNext(lastPromise, this)); - } else { - // fast path needed to support multiple this.push() - // without triggering the next() queue - var data = this[kStream].read(); + // "If the cookie's secure-only-flag is true, then the request-uri's + // scheme must denote a "secure" protocol" + if (c.secure && !secure) { + return false; + } - if (data !== null) { - return Promise.resolve(createIterResult(data, false)); - } + // "If the cookie's http-only-flag is true, then exclude the cookie if the + // cookie-string is being generated for a "non-HTTP" API" + if (c.httpOnly && !http) { + return false; + } - promise = new Promise(this[kHandlePromise]); + // deferred from S5.3 + // non-RFC: allow retention of expired cookies by choice + if (expireCheck && c.expiryTime() <= now) { + store.removeCookie(c.domain, c.path, c.key, function(){}); // result ignored + return false; } - this[kLastPromise] = promise; - return promise; + return true; } -}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { - return this; -}), _defineProperty(_Object$setPrototypeO, "return", function _return() { - var _this2 = this; - // destroy(err, cb) is a private API - // we can guarantee we have that here, because we control the - // Readable class this is attached to - return new Promise(function (resolve, reject) { - _this2[kStream].destroy(null, function (err) { - if (err) { - reject(err); - return; - } + store.findCookies(host, allPaths ? 
null : path, function(err,cookies) { + if (err) { + return cb(err); + } - resolve(createIterResult(undefined, true)); - }); - }); -}), _Object$setPrototypeO), AsyncIteratorPrototype); + cookies = cookies.filter(matchingCookie); -var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { - var _Object$create; + // sorting of S5.4 part 2 + if (options.sort !== false) { + cookies = cookies.sort(cookieCompare); + } - var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { - value: stream, - writable: true - }), _defineProperty(_Object$create, kLastResolve, { - value: null, - writable: true - }), _defineProperty(_Object$create, kLastReject, { - value: null, - writable: true - }), _defineProperty(_Object$create, kError, { - value: null, - writable: true - }), _defineProperty(_Object$create, kEnded, { - value: stream._readableState.endEmitted, - writable: true - }), _defineProperty(_Object$create, kHandlePromise, { - value: function value(resolve, reject) { - var data = iterator[kStream].read(); + // S5.4 part 3 + var now = new Date(); + cookies.forEach(function(c) { + c.lastAccessed = now; + }); + // TODO persist lastAccessed - if (data) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(data, false)); - } else { - iterator[kLastResolve] = resolve; - iterator[kLastReject] = reject; - } - }, - writable: true - }), _Object$create)); - iterator[kLastPromise] = null; - finished(stream, function (err) { - if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise - // returned by next() and store the error + cb(null,cookies); + }); +}; - if (reject !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - reject(err); - } +CAN_BE_SYNC.push('getCookieString'); +CookieJar.prototype.getCookieString = function(/*..., cb*/) { + var args = Array.prototype.slice.call(arguments,0); + var cb = args.pop(); + var next = function(err,cookies) { + if (err) { + cb(err); + } else { + cb(null, cookies + .sort(cookieCompare) + .map(function(c){ + return c.cookieString(); + }) + .join('; ')); + } + }; + args.push(next); + this.getCookies.apply(this,args); +}; - iterator[kError] = err; - return; +CAN_BE_SYNC.push('getSetCookieStrings'); +CookieJar.prototype.getSetCookieStrings = function(/*..., cb*/) { + var args = Array.prototype.slice.call(arguments,0); + var cb = args.pop(); + var next = function(err,cookies) { + if (err) { + cb(err); + } else { + cb(null, cookies.map(function(c){ + return c.toString(); + })); } + }; + args.push(next); + this.getCookies.apply(this,args); +}; - var resolve = iterator[kLastResolve]; +CAN_BE_SYNC.push('serialize'); +CookieJar.prototype.serialize = function(cb) { + var type = this.store.constructor.name; + if (type === 'Object') { + type = null; + } - if (resolve !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(undefined, true)); - } + // update README.md "Serialization Format" if you change this, please! + var serialized = { + // The version of tough-cookie that serialized this jar. Generally a good + // practice since future versions can make data import decisions based on + // known past behavior. When/if this matters, use `semver`. 
+ version: 'tough-cookie@'+VERSION, - iterator[kEnded] = true; - }); - stream.on('readable', onReadable.bind(null, iterator)); - return iterator; -}; + // add the store type, to make humans happy: + storeType: type, -module.exports = createReadableStreamAsyncIterator; + // CookieJar configuration: + rejectPublicSuffixes: !!this.rejectPublicSuffixes, -/***/ }), + // this gets filled from getAllCookies: + cookies: [] + }; -/***/ 52746: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!(this.store.getAllCookies && + typeof this.store.getAllCookies === 'function')) + { + return cb(new Error('store does not support getAllCookies and cannot be serialized')); + } -"use strict"; + this.store.getAllCookies(function(err,cookies) { + if (err) { + return cb(err); + } + serialized.cookies = cookies.map(function(cookie) { + // convert to serialized 'raw' cookies + cookie = (cookie instanceof Cookie) ? cookie.toJSON() : cookie; -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + // Remove the index so new ones get assigned during deserialization + delete cookie.creationIndex; -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + return cookie; + }); -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + return cb(null, serialized); + }); +}; -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +// well-known name that JSON.stringify calls +CookieJar.prototype.toJSON = function() { + return this.serializeSync(); +}; -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } +// use the class method CookieJar.deserialize instead of calling this directly +CAN_BE_SYNC.push('_importCookies'); +CookieJar.prototype._importCookies = function(serialized, cb) { + var jar = this; + var cookies = serialized.cookies; + if (!cookies || !Array.isArray(cookies)) { + return cb(new Error('serialized jar has no cookies array')); + } + cookies = cookies.slice(); // do not modify the original -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } + function putNext(err) { + if (err) { + return cb(err); + } -var _require = __nccwpck_require__(64293), - Buffer = 
_require.Buffer; + if (!cookies.length) { + return cb(err, jar); + } -var _require2 = __nccwpck_require__(31669), - inspect = _require2.inspect; + var cookie; + try { + cookie = fromJSON(cookies.shift()); + } catch (e) { + return cb(e); + } -var custom = inspect && inspect.custom || 'inspect'; + if (cookie === null) { + return putNext(null); // skip this cookie + } -function copyBuffer(src, target, offset) { - Buffer.prototype.copy.call(src, target, offset); -} + jar.store.putCookie(cookie, putNext); + } -module.exports = -/*#__PURE__*/ -function () { - function BufferList() { - _classCallCheck(this, BufferList); + putNext(); +}; - this.head = null; - this.tail = null; - this.length = 0; +CookieJar.deserialize = function(strOrObj, store, cb) { + if (arguments.length !== 3) { + // store is optional + cb = store; + store = null; } - _createClass(BufferList, [{ - key: "push", - value: function push(v) { - var entry = { - data: v, - next: null - }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - } - }, { - key: "unshift", - value: function unshift(v) { - var entry = { - data: v, - next: this.head - }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - } - }, { - key: "shift", - value: function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - } - }, { - key: "clear", - value: function clear() { - this.head = this.tail = null; - this.length = 0; + var serialized; + if (typeof strOrObj === 'string') { + serialized = jsonParse(strOrObj); + if (serialized instanceof Error) { + return cb(serialized); } - }, { - key: "join", - value: function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - - while (p = p.next) { - ret += s + p.data; - } + } else { + serialized = strOrObj; + } - return ret; + var jar = new CookieJar(store, serialized.rejectPublicSuffixes); + jar._importCookies(serialized, function(err) { + if (err) { + return cb(err); } - }, { - key: "concat", - value: function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; + cb(null, jar); + }); +}; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } +CookieJar.deserializeSync = function(strOrObj, store) { + var serialized = typeof strOrObj === 'string' ? + JSON.parse(strOrObj) : strOrObj; + var jar = new CookieJar(store, serialized.rejectPublicSuffixes); - return ret; - } // Consumes a specified amount of bytes or characters from the buffered data. + // catch this mistake early: + if (!jar.store.synchronous) { + throw new Error('CookieJar store is not synchronous; use async API instead.'); + } - }, { - key: "consume", - value: function consume(n, hasStrings) { - var ret; + jar._importCookiesSync(serialized); + return jar; +}; +CookieJar.fromJSON = CookieJar.deserializeSync; - if (n < this.head.data.length) { - // `slice` is the same for buffers and strings. - ret = this.head.data.slice(0, n); - this.head.data = this.head.data.slice(n); - } else if (n === this.head.data.length) { - // First chunk is a perfect match. - ret = this.shift(); - } else { - // Result spans more than one buffer. - ret = hasStrings ? 
this._getString(n) : this._getBuffer(n); - } +CookieJar.prototype.clone = function(newStore, cb) { + if (arguments.length === 1) { + cb = newStore; + newStore = null; + } - return ret; + this.serialize(function(err,serialized) { + if (err) { + return cb(err); } - }, { - key: "first", - value: function first() { - return this.head.data; - } // Consumes a specified amount of characters from the buffered data. + CookieJar.deserialize(serialized, newStore, cb); + }); +}; - }, { - key: "_getString", - value: function _getString(n) { - var p = this.head; - var c = 1; - var ret = p.data; - n -= ret.length; +CAN_BE_SYNC.push('removeAllCookies'); +CookieJar.prototype.removeAllCookies = function(cb) { + var store = this.store; - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; + // Check that the store implements its own removeAllCookies(). The default + // implementation in Store will immediately call the callback with a "not + // implemented" Error. + if (store.removeAllCookies instanceof Function && + store.removeAllCookies !== Store.prototype.removeAllCookies) + { + return store.removeAllCookies(cb); + } - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = str.slice(nb); - } + store.getAllCookies(function(err, cookies) { + if (err) { + return cb(err); + } - break; - } + if (cookies.length === 0) { + return cb(null); + } - ++c; - } + var completedCount = 0; + var removeErrors = []; - this.length -= c; - return ret; - } // Consumes a specified amount of bytes from the buffered data. + function removeCookieCb(removeErr) { + if (removeErr) { + removeErrors.push(removeErr); + } - }, { - key: "_getBuffer", - value: function _getBuffer(n) { - var ret = Buffer.allocUnsafe(n); - var p = this.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; + completedCount++; - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; + if (completedCount === cookies.length) { + return cb(removeErrors.length ? removeErrors[0] : null); + } + } - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = buf.slice(nb); - } + cookies.forEach(function(cookie) { + store.removeCookie(cookie.domain, cookie.path, cookie.key, removeCookieCb); + }); + }); +}; - break; - } +CookieJar.prototype._cloneSync = syncWrap('clone'); +CookieJar.prototype.cloneSync = function(newStore) { + if (!newStore.synchronous) { + throw new Error('CookieJar clone destination store is not synchronous; use async API instead.'); + } + return this._cloneSync(newStore); +}; - ++c; - } +// Use a closure to provide a true imperative API for synchronous stores. +function syncWrap(method) { + return function() { + if (!this.store.synchronous) { + throw new Error('CookieJar store is not synchronous; use async API instead.'); + } - this.length -= c; - return ret; - } // Make sure the linked list only shows the minimal necessary information. 
+ var args = Array.prototype.slice.call(arguments); + var syncErr, syncResult; + args.push(function syncCb(err, result) { + syncErr = err; + syncResult = result; + }); + this[method].apply(this, args); - }, { - key: custom, - value: function value(_, options) { - return inspect(this, _objectSpread({}, options, { - // Only inspect one level. - depth: 0, - // It should not recurse. - customInspect: false - })); + if (syncErr) { + throw syncErr; } - }]); + return syncResult; + }; +} + +// wrap all declared CAN_BE_SYNC methods in the sync wrapper +CAN_BE_SYNC.forEach(function(method) { + CookieJar.prototype[method+'Sync'] = syncWrap(method); +}); + +exports.version = VERSION; +exports.CookieJar = CookieJar; +exports.Cookie = Cookie; +exports.Store = Store; +exports.MemoryCookieStore = MemoryCookieStore; +exports.parseDate = parseDate; +exports.formatDate = formatDate; +exports.parse = parse; +exports.fromJSON = fromJSON; +exports.domainMatch = domainMatch; +exports.defaultPath = defaultPath; +exports.pathMatch = pathMatch; +exports.getPublicSuffix = pubsuffix.getPublicSuffix; +exports.cookieCompare = cookieCompare; +exports.permuteDomain = __nccwpck_require__(91478).permuteDomain; +exports.permutePath = permutePath; +exports.canonicalDomain = canonicalDomain; - return BufferList; -}(); /***/ }), -/***/ 97049: -/***/ ((module) => { +/***/ 73533: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - // undocumented cb() API, needed for core, not for public API - -function destroy(err, cb) { - var _this = this; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; +var Store = __nccwpck_require__(11013)/* .Store */ .y; +var permuteDomain = __nccwpck_require__(91478).permuteDomain; +var pathMatch = __nccwpck_require__(30495)/* .pathMatch */ .U; +var util = __nccwpck_require__(31669); - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - process.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - process.nextTick(emitErrorNT, this, err); - } - } +function MemoryCookieStore() { + Store.call(this); + this.idx = {}; +} +util.inherits(MemoryCookieStore, Store); +exports.m = MemoryCookieStore; +MemoryCookieStore.prototype.idx = null; - return this; - } // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks +// Since it's just a struct in RAM, this Store is synchronous +MemoryCookieStore.prototype.synchronous = true; +// force a default depth: +MemoryCookieStore.prototype.inspect = function() { + return "{ idx: "+util.inspect(this.idx, false, 2)+' }'; +}; - if (this._readableState) { - this._readableState.destroyed = true; - } // if this is a duplex stream mark the writable part as destroyed as well +// Use the new custom inspection symbol to add the custom inspect function if +// available. +if (util.inspect.custom) { + MemoryCookieStore.prototype[util.inspect.custom] = MemoryCookieStore.prototype.inspect; +} +MemoryCookieStore.prototype.findCookie = function(domain, path, key, cb) { + if (!this.idx[domain]) { + return cb(null,undefined); + } + if (!this.idx[domain][path]) { + return cb(null,undefined); + } + return cb(null,this.idx[domain][path][key]||null); +}; - if (this._writableState) { - this._writableState.destroyed = true; +MemoryCookieStore.prototype.findCookies = function(domain, path, cb) { + var results = []; + if (!domain) { + return cb(null,[]); } - this._destroy(err || null, function (err) { - if (!cb && err) { - if (!_this._writableState) { - process.nextTick(emitErrorAndCloseNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - process.nextTick(emitErrorAndCloseNT, _this, err); - } else { - process.nextTick(emitCloseNT, _this); + var pathMatcher; + if (!path) { + // null means "all paths" + pathMatcher = function matchAll(domainIndex) { + for (var curPath in domainIndex) { + var pathIndex = domainIndex[curPath]; + for (var key in pathIndex) { + results.push(pathIndex[key]); + } } - } else if (cb) { - process.nextTick(emitCloseNT, _this); - cb(err); - } else { - process.nextTick(emitCloseNT, _this); + }; + + } else { + pathMatcher = function matchRFC(domainIndex) { + //NOTE: we should use path-match algorithm from S5.1.4 here + //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299) + Object.keys(domainIndex).forEach(function (cookiePath) { + if (pathMatch(path, cookiePath)) { + var pathIndex = domainIndex[cookiePath]; + + for (var key in pathIndex) { + results.push(pathIndex[key]); + } + } + }); + }; + } + + var domains = permuteDomain(domain) || [domain]; + var idx = this.idx; + domains.forEach(function(curDomain) { + var domainIndex = idx[curDomain]; + if (!domainIndex) { + return; } + 
pathMatcher(domainIndex); }); - return this; -} + cb(null,results); +}; -function emitErrorAndCloseNT(self, err) { - emitErrorNT(self, err); - emitCloseNT(self); -} +MemoryCookieStore.prototype.putCookie = function(cookie, cb) { + if (!this.idx[cookie.domain]) { + this.idx[cookie.domain] = {}; + } + if (!this.idx[cookie.domain][cookie.path]) { + this.idx[cookie.domain][cookie.path] = {}; + } + this.idx[cookie.domain][cookie.path][cookie.key] = cookie; + cb(null); +}; -function emitCloseNT(self) { - if (self._writableState && !self._writableState.emitClose) return; - if (self._readableState && !self._readableState.emitClose) return; - self.emit('close'); -} +MemoryCookieStore.prototype.updateCookie = function(oldCookie, newCookie, cb) { + // updateCookie() may avoid updating cookies that are identical. For example, + // lastAccessed may not be important to some stores and an equality + // comparison could exclude that field. + this.putCookie(newCookie,cb); +}; -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; +MemoryCookieStore.prototype.removeCookie = function(domain, path, key, cb) { + if (this.idx[domain] && this.idx[domain][path] && this.idx[domain][path][key]) { + delete this.idx[domain][path][key]; } + cb(null); +}; - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; +MemoryCookieStore.prototype.removeCookies = function(domain, path, cb) { + if (this.idx[domain]) { + if (path) { + delete this.idx[domain][path]; + } else { + delete this.idx[domain]; + } } -} + return cb(null); +}; -function emitErrorNT(self, err) { - self.emit('error', err); +MemoryCookieStore.prototype.removeAllCookies = function(cb) { + this.idx = {}; + return cb(null); } -function errorOrDestroy(stream, err) { - // We have tests that rely on errors being emitted - // in the same tick, so changing this is semver major. - // For now when you opt-in to autoDestroy we allow - // the error to be emitted nextTick. In a future - // semver major update we should change the default to this. - var rState = stream._readableState; - var wState = stream._writableState; - if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); -} +MemoryCookieStore.prototype.getAllCookies = function(cb) { + var cookies = []; + var idx = this.idx; -module.exports = { - destroy: destroy, - undestroy: undestroy, - errorOrDestroy: errorOrDestroy + var domains = Object.keys(idx); + domains.forEach(function(domain) { + var paths = Object.keys(idx[domain]); + paths.forEach(function(path) { + var keys = Object.keys(idx[domain][path]); + keys.forEach(function(key) { + if (key !== null) { + cookies.push(idx[domain][path][key]); + } + }); + }); + }); + + // Sort by creationIndex so deserializing retains the creation order. 
+ // When implementing your own store, this SHOULD retain the order too + cookies.sort(function(a,b) { + return (a.creationIndex||0) - (b.creationIndex||0); + }); + + cb(null, cookies); }; + /***/ }), -/***/ 76080: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 30495: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -// Ported from https://github.com/mafintosh/end-of-stream with -// permission from the author, Mathias Buus (@mafintosh). - +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ -var ERR_STREAM_PREMATURE_CLOSE = __nccwpck_require__(67214)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE; +/* + * "A request-path path-matches a given cookie-path if at least one of the + * following conditions holds:" + */ +function pathMatch (reqPath, cookiePath) { + // "o The cookie-path and the request-path are identical." + if (cookiePath === reqPath) { + return true; + } -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; + var idx = reqPath.indexOf(cookiePath); + if (idx === 0) { + // "o The cookie-path is a prefix of the request-path, and the last + // character of the cookie-path is %x2F ("/")." + if (cookiePath.substr(-1) === "/") { + return true; + } - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { - args[_key] = arguments[_key]; + // " o The cookie-path is a prefix of the request-path, and the first + // character of the request-path that is not included in the cookie- path + // is a %x2F ("/") character." 
+ if (reqPath.substr(cookiePath.length, 1) === "/") { + return true; } + } - callback.apply(this, args); - }; + return false; } -function noop() {} +exports.U = pathMatch; -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} -function eos(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - callback = once(callback || noop); - var readable = opts.readable || opts.readable !== false && stream.readable; - var writable = opts.writable || opts.writable !== false && stream.writable; +/***/ }), - var onlegacyfinish = function onlegacyfinish() { - if (!stream.writable) onfinish(); - }; +/***/ 91478: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var writableEnded = stream._writableState && stream._writableState.finished; +"use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ - var onfinish = function onfinish() { - writable = false; - writableEnded = true; - if (!readable) callback.call(stream); - }; +var pubsuffix = __nccwpck_require__(34964); - var readableEnded = stream._readableState && stream._readableState.endEmitted; +// Gives the permutation of all possible domainMatch()es of a given domain. The +// array is in shortest-to-longest order. Handy for indexing. +function permuteDomain (domain) { + var pubSuf = pubsuffix.getPublicSuffix(domain); + if (!pubSuf) { + return null; + } + if (pubSuf == domain) { + return [domain]; + } - var onend = function onend() { - readable = false; - readableEnded = true; - if (!writable) callback.call(stream); - }; + var prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com" + var parts = prefix.split('.').reverse(); + var cur = pubSuf; + var permutations = [cur]; + while (parts.length) { + cur = parts.shift() + '.' 
+ cur; + permutations.push(cur); + } + return permutations; +} - var onerror = function onerror(err) { - callback.call(stream, err); - }; +exports.permuteDomain = permuteDomain; - var onclose = function onclose() { - var err; - if (readable && !readableEnded) { - if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } +/***/ }), - if (writable && !writableEnded) { - if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - }; +/***/ 34964: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - var onrequest = function onrequest() { - stream.req.on('finish', onfinish); - }; +"use strict"; +/*! + * Copyright (c) 2018, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest();else stream.on('request', onrequest); - } else if (writable && !stream._writableState) { - // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } +var psl = __nccwpck_require__(29975); - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - return function () { - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; +function getPublicSuffix(domain) { + return psl.get(domain); } -module.exports = eos; +exports.getPublicSuffix = getPublicSuffix; + /***/ }), -/***/ 39082: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 11013: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ +/*jshint unused:false */ -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function Store() { +} +exports.y = Store; -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } +// Stores may be synchronous, but are still required to use a +// Continuation-Passing Style API. The CookieJar itself will expose a "*Sync" +// API that converts from synchronous-callbacks to imperative style. +Store.prototype.synchronous = false; -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } +Store.prototype.findCookie = function(domain, path, key, cb) { + throw new Error('findCookie is not implemented'); +}; -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +Store.prototype.findCookies = function(domain, path, cb) { + throw new Error('findCookies is not implemented'); +}; -var ERR_INVALID_ARG_TYPE = __nccwpck_require__(67214)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE; +Store.prototype.putCookie = function(cookie, cb) { + throw new Error('putCookie is not implemented'); +}; -function from(Readable, iterable, opts) { - var iterator; +Store.prototype.updateCookie = function(oldCookie, newCookie, cb) { + // recommended default implementation: + // return this.putCookie(newCookie, cb); + throw new Error('updateCookie is not implemented'); +}; - if (iterable && typeof iterable.next === 'function') { - iterator = iterable; - } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); +Store.prototype.removeCookie = function(domain, path, key, cb) { + throw new Error('removeCookie is not implemented'); +}; - var readable = new Readable(_objectSpread({ - objectMode: true - }, opts)); // Reading boolean to protect against _read - // being called before last iteration completion. 
+Store.prototype.removeCookies = function(domain, path, cb) { + throw new Error('removeCookies is not implemented'); +}; - var reading = false; +Store.prototype.removeAllCookies = function(cb) { + throw new Error('removeAllCookies is not implemented'); +} - readable._read = function () { - if (!reading) { - reading = true; - next(); - } - }; +Store.prototype.getAllCookies = function(cb) { + throw new Error('getAllCookies is not implemented (therefore jar cannot be serialized)'); +}; - function next() { - return _next2.apply(this, arguments); - } - function _next2() { - _next2 = _asyncToGenerator(function* () { - try { - var _ref = yield iterator.next(), - value = _ref.value, - done = _ref.done; +/***/ }), - if (done) { - readable.push(null); - } else if (readable.push((yield value))) { - next(); - } else { - reading = false; - } - } catch (err) { - readable.destroy(err); - } - }); - return _next2.apply(this, arguments); - } +/***/ 30380: +/***/ ((module) => { - return readable; -} +// generated by genversion +module.exports = '2.5.0' -module.exports = from; /***/ }), -/***/ 76989: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 67087: +/***/ ((module) => { -"use strict"; -// Ported from https://github.com/mafintosh/pump with -// permission from the author, Mathias Buus (@mafintosh). +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} -var eos; +module.exports = bytesToUuid; -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; - callback.apply(void 0, arguments); - }; -} -var _require$codes = __nccwpck_require__(67214)/* .codes */ .q, - ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +/***/ }), -function noop(err) { - // Rethrow the error if it exists to avoid swallowing it - if (err) throw err; -} +/***/ 9117: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. 
-function destroyer(stream, reading, writing, callback) { - callback = once(callback); - var closed = false; - stream.on('close', function () { - closed = true; - }); - if (eos === undefined) eos = __nccwpck_require__(76080); - eos(stream, { - readable: reading, - writable: writing - }, function (err) { - if (err) return callback(err); - closed = true; - callback(); - }); - var destroyed = false; - return function (err) { - if (closed) return; - if (destroyed) return; - destroyed = true; // request.destroy just do .end - .abort is what we want +var crypto = __nccwpck_require__(76417); - if (isRequest(stream)) return stream.abort(); - if (typeof stream.destroy === 'function') return stream.destroy(); - callback(err || new ERR_STREAM_DESTROYED('pipe')); - }; -} +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; -function call(fn) { - fn(); -} -function pipe(from, to) { - return from.pipe(to); -} +/***/ }), -function popCallback(streams) { - if (!streams.length) return noop; - if (typeof streams[streams.length - 1] !== 'function') return noop; - return streams.pop(); -} +/***/ 71435: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function pipeline() { - for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { - streams[_key] = arguments[_key]; +var rng = __nccwpck_require__(9117); +var bytesToUuid = __nccwpck_require__(67087); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; } + options = options || {}; - var callback = popCallback(streams); - if (Array.isArray(streams[0])) streams = streams[0]; + var rnds = options.random || (options.rng || rng)(); - if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams'); + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } } - var error; - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1; - var writing = i > 0; - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err; - if (err) destroys.forEach(call); - if (reading) return; - destroys.forEach(call); - callback(error); - }); - }); - return streams.reduce(pipe); + return buf || bytesToUuid(rnds); } -module.exports = pipeline; +module.exports = v4; + /***/ }), -/***/ 39948: +/***/ 70304: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -var ERR_INVALID_OPT_VALUE = __nccwpck_require__(67214)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE; +var http = __nccwpck_require__(98605) +var https = __nccwpck_require__(57211) +var url = __nccwpck_require__(78835) +var util = __nccwpck_require__(31669) +var stream = __nccwpck_require__(92413) +var zlib = __nccwpck_require__(78761) +var aws2 = __nccwpck_require__(96342) +var aws4 = __nccwpck_require__(16071) +var httpSignature = __nccwpck_require__(42479) +var mime = __nccwpck_require__(43583) +var caseless = __nccwpck_require__(35684) +var ForeverAgent = __nccwpck_require__(47568) +var FormData = __nccwpck_require__(64334) +var extend = __nccwpck_require__(38171) +var isstream = __nccwpck_require__(83362) +var isTypedArray = __nccwpck_require__(10657).strict +var helpers = __nccwpck_require__(74845) +var cookies = 
__nccwpck_require__(50976) +var getProxyFromURI = __nccwpck_require__(75654) +var Querystring = __nccwpck_require__(66476)/* .Querystring */ .h +var Har = __nccwpck_require__(3248)/* .Har */ .t +var Auth = __nccwpck_require__(76996)/* .Auth */ .g +var OAuth = __nccwpck_require__(41174)/* .OAuth */ .f +var hawk = __nccwpck_require__(34473) +var Multipart = __nccwpck_require__(87810)/* .Multipart */ .$ +var Redirect = __nccwpck_require__(3048)/* .Redirect */ .l +var Tunnel = __nccwpck_require__(17619)/* .Tunnel */ .n +var now = __nccwpck_require__(85644) +var Buffer = __nccwpck_require__(21867).Buffer -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; -} +var safeStringify = helpers.safeStringify +var isReadStream = helpers.isReadStream +var toBase64 = helpers.toBase64 +var defer = helpers.defer +var copy = helpers.copy +var version = helpers.version +var globalCookieJar = cookies.jar() -function getHighWaterMark(state, options, duplexKey, isDuplex) { - var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); +var globalPool = {} - if (hwm != null) { - if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { - var name = isDuplex ? duplexKey : 'highWaterMark'; - throw new ERR_INVALID_OPT_VALUE(name, hwm); +function filterForNonReserved (reserved, options) { + // Filter out properties that are not reserved. + // Reserved values are passed in at call site. + + var object = {} + for (var i in options) { + var notReserved = (reserved.indexOf(i) === -1) + if (notReserved) { + object[i] = options[i] } + } + return object +} - return Math.floor(hwm); - } // Default value +function filterOutReservedFunctions (reserved, options) { + // Filter out properties that are functions and are reserved. + // Reserved values are passed in at call site. + var object = {} + for (var i in options) { + var isReserved = !(reserved.indexOf(i) === -1) + var isFunction = (typeof options[i] === 'function') + if (!(isReserved && isFunction)) { + object[i] = options[i] + } + } + return object +} - return state.objectMode ? 
16 : 16 * 1024; +// Return a simpler request object to allow serialization +function requestToJSON () { + var self = this + return { + uri: self.uri, + method: self.method, + headers: self.headers + } } -module.exports = { - getHighWaterMark: getHighWaterMark -}; +// Return a simpler response object to allow serialization +function responseToJSON () { + var self = this + return { + statusCode: self.statusCode, + body: self.body, + headers: self.headers, + request: requestToJSON.call(self.request) + } +} -/***/ }), +function Request (options) { + // if given the method property in options, set property explicitMethod to true -/***/ 62387: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // extend the Request instance with any non-reserved properties + // remove any reserved functions from the options object + // set Request instance to be readable and writable + // call init -module.exports = __nccwpck_require__(92413); + var self = this + // start with HAR, then override with additional options + if (options.har) { + self._har = new Har(self) + options = self._har.options(options) + } -/***/ }), + stream.Stream.call(self) + var reserved = Object.keys(Request.prototype) + var nonReserved = filterForNonReserved(reserved, options) -/***/ 51642: -/***/ ((module, exports, __nccwpck_require__) => { + extend(self, nonReserved) + options = filterOutReservedFunctions(reserved, options) -var Stream = __nccwpck_require__(92413); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream.Readable; - Object.assign(module.exports, Stream); - module.exports.Stream = Stream; -} else { - exports = module.exports = __nccwpck_require__(51433); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = __nccwpck_require__(26993); - exports.Duplex = __nccwpck_require__(41359); - exports.Transform = __nccwpck_require__(34415); - exports.PassThrough = __nccwpck_require__(81542); - exports.finished = __nccwpck_require__(76080); - exports.pipeline = __nccwpck_require__(76989); + self.readable = true + self.writable = true + if (options.method) { + self.explicitMethod = true + } + self._qs = new Querystring(self) + self._auth = new Auth(self) + self._oauth = new OAuth(self) + self._multipart = new Multipart(self) + self._redirect = new Redirect(self) + self._tunnel = new Tunnel(self) + self.init(options) } +util.inherits(Request, stream.Stream) -/***/ }), +// Debugging +Request.debug = process.env.NODE_DEBUG && /\brequest\b/.test(process.env.NODE_DEBUG) +function debug () { + if (Request.debug) { + console.error('REQUEST %s', util.format.apply(util, arguments)) + } +} +Request.prototype.debug = debug -/***/ 29977: -/***/ (() => { +Request.prototype.init = function (options) { + // init() contains all the code to setup the request object. + // the actual outgoing request is not started until start() is called + // this function is called from both the constructor and on redirect. + var self = this + if (!options) { + options = {} + } + self.headers = self.headers ? copy(self.headers) : {} -/*! ***************************************************************************** -Copyright (C) Microsoft. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this file except in compliance with the License. 
You may obtain a copy of the -License at http://www.apache.org/licenses/LICENSE-2.0 + // Delete headers with value undefined since they break + // ClientRequest.OutgoingMessage.setHeader in node 0.12 + for (var headerName in self.headers) { + if (typeof self.headers[headerName] === 'undefined') { + delete self.headers[headerName] + } + } -THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED -WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, -MERCHANTABLITY OR NON-INFRINGEMENT. + caseless.httpify(self, self.headers) -See the Apache Version 2.0 License for specific language governing permissions -and limitations under the License. -***************************************************************************** */ -var Reflect; -(function (Reflect) { - // Metadata Proposal - // https://rbuckton.github.io/reflect-metadata/ - (function (factory) { - var root = typeof global === "object" ? global : - typeof self === "object" ? self : - typeof this === "object" ? this : - Function("return this;")(); - var exporter = makeExporter(Reflect); - if (typeof root.Reflect === "undefined") { - root.Reflect = Reflect; - } - else { - exporter = makeExporter(root.Reflect, exporter); - } - factory(exporter); - function makeExporter(target, previous) { - return function (key, value) { - if (typeof target[key] !== "function") { - Object.defineProperty(target, key, { configurable: true, writable: true, value: value }); - } - if (previous) - previous(key, value); - }; - } - })(function (exporter) { - var hasOwn = Object.prototype.hasOwnProperty; - // feature test for Symbol support - var supportsSymbol = typeof Symbol === "function"; - var toPrimitiveSymbol = supportsSymbol && typeof Symbol.toPrimitive !== "undefined" ? Symbol.toPrimitive : "@@toPrimitive"; - var iteratorSymbol = supportsSymbol && typeof Symbol.iterator !== "undefined" ? Symbol.iterator : "@@iterator"; - var supportsCreate = typeof Object.create === "function"; // feature test for Object.create support - var supportsProto = { __proto__: [] } instanceof Array; // feature test for __proto__ support - var downLevel = !supportsCreate && !supportsProto; - var HashMap = { - // create an object in dictionary mode (a.k.a. "slow" mode in v8) - create: supportsCreate - ? function () { return MakeDictionary(Object.create(null)); } - : supportsProto - ? function () { return MakeDictionary({ __proto__: null }); } - : function () { return MakeDictionary({}); }, - has: downLevel - ? function (map, key) { return hasOwn.call(map, key); } - : function (map, key) { return key in map; }, - get: downLevel - ? function (map, key) { return hasOwn.call(map, key) ? map[key] : undefined; } - : function (map, key) { return map[key]; }, - }; - // Load global or shim versions of Map, Set, and WeakMap - var functionPrototype = Object.getPrototypeOf(Function); - var usePolyfill = typeof process === "object" && process.env && process.env["REFLECT_METADATA_USE_MAP_POLYFILL"] === "true"; - var _Map = !usePolyfill && typeof Map === "function" && typeof Map.prototype.entries === "function" ? Map : CreateMapPolyfill(); - var _Set = !usePolyfill && typeof Set === "function" && typeof Set.prototype.entries === "function" ? Set : CreateSetPolyfill(); - var _WeakMap = !usePolyfill && typeof WeakMap === "function" ? 
WeakMap : CreateWeakMapPolyfill(); - // [[Metadata]] internal slot - // https://rbuckton.github.io/reflect-metadata/#ordinary-object-internal-methods-and-internal-slots - var Metadata = new _WeakMap(); - /** - * Applies a set of decorators to a property of a target object. - * @param decorators An array of decorators. - * @param target The target object. - * @param propertyKey (Optional) The property key to decorate. - * @param attributes (Optional) The property descriptor for the target key. - * @remarks Decorators are applied in reverse order. - * @example - * - * class Example { - * // property declarations are not part of ES6, though they are valid in TypeScript: - * // static staticProperty; - * // property; - * - * constructor(p) { } - * static staticMethod(p) { } - * method(p) { } - * } - * - * // constructor - * Example = Reflect.decorate(decoratorsArray, Example); - * - * // property (on constructor) - * Reflect.decorate(decoratorsArray, Example, "staticProperty"); - * - * // property (on prototype) - * Reflect.decorate(decoratorsArray, Example.prototype, "property"); - * - * // method (on constructor) - * Object.defineProperty(Example, "staticMethod", - * Reflect.decorate(decoratorsArray, Example, "staticMethod", - * Object.getOwnPropertyDescriptor(Example, "staticMethod"))); - * - * // method (on prototype) - * Object.defineProperty(Example.prototype, "method", - * Reflect.decorate(decoratorsArray, Example.prototype, "method", - * Object.getOwnPropertyDescriptor(Example.prototype, "method"))); - * - */ - function decorate(decorators, target, propertyKey, attributes) { - if (!IsUndefined(propertyKey)) { - if (!IsArray(decorators)) - throw new TypeError(); - if (!IsObject(target)) - throw new TypeError(); - if (!IsObject(attributes) && !IsUndefined(attributes) && !IsNull(attributes)) - throw new TypeError(); - if (IsNull(attributes)) - attributes = undefined; - propertyKey = ToPropertyKey(propertyKey); - return DecorateProperty(decorators, target, propertyKey, attributes); - } - else { - if (!IsArray(decorators)) - throw new TypeError(); - if (!IsConstructor(target)) - throw new TypeError(); - return DecorateConstructor(decorators, target); - } - } - exporter("decorate", decorate); - // 4.1.2 Reflect.metadata(metadataKey, metadataValue) - // https://rbuckton.github.io/reflect-metadata/#reflect.metadata - /** - * A default metadata decorator factory that can be used on a class, class member, or parameter. - * @param metadataKey The key for the metadata entry. - * @param metadataValue The value for the metadata entry. - * @returns A decorator function. - * @remarks - * If `metadataKey` is already defined for the target and target key, the - * metadataValue for that key will be overwritten. 
- * @example - * - * // constructor - * @Reflect.metadata(key, value) - * class Example { - * } - * - * // property (on constructor, TypeScript only) - * class Example { - * @Reflect.metadata(key, value) - * static staticProperty; - * } - * - * // property (on prototype, TypeScript only) - * class Example { - * @Reflect.metadata(key, value) - * property; - * } - * - * // method (on constructor) - * class Example { - * @Reflect.metadata(key, value) - * static staticMethod() { } - * } - * - * // method (on prototype) - * class Example { - * @Reflect.metadata(key, value) - * method() { } - * } - * - */ - function metadata(metadataKey, metadataValue) { - function decorator(target, propertyKey) { - if (!IsObject(target)) - throw new TypeError(); - if (!IsUndefined(propertyKey) && !IsPropertyKey(propertyKey)) - throw new TypeError(); - OrdinaryDefineOwnMetadata(metadataKey, metadataValue, target, propertyKey); - } - return decorator; - } - exporter("metadata", metadata); - /** - * Define a unique metadata entry on the target. - * @param metadataKey A key used to store and retrieve metadata. - * @param metadataValue A value that contains attached metadata. - * @param target The target object on which to define metadata. - * @param propertyKey (Optional) The property key for the target. - * @example - * - * class Example { - * // property declarations are not part of ES6, though they are valid in TypeScript: - * // static staticProperty; - * // property; - * - * constructor(p) { } - * static staticMethod(p) { } - * method(p) { } - * } - * - * // constructor - * Reflect.defineMetadata("custom:annotation", options, Example); - * - * // property (on constructor) - * Reflect.defineMetadata("custom:annotation", options, Example, "staticProperty"); - * - * // property (on prototype) - * Reflect.defineMetadata("custom:annotation", options, Example.prototype, "property"); - * - * // method (on constructor) - * Reflect.defineMetadata("custom:annotation", options, Example, "staticMethod"); - * - * // method (on prototype) - * Reflect.defineMetadata("custom:annotation", options, Example.prototype, "method"); - * - * // decorator factory as metadata-producing annotation. - * function MyAnnotation(options): Decorator { - * return (target, key?) => Reflect.defineMetadata("custom:annotation", options, target, key); - * } - * - */ - function defineMetadata(metadataKey, metadataValue, target, propertyKey) { - if (!IsObject(target)) - throw new TypeError(); - if (!IsUndefined(propertyKey)) - propertyKey = ToPropertyKey(propertyKey); - return OrdinaryDefineOwnMetadata(metadataKey, metadataValue, target, propertyKey); - } - exporter("defineMetadata", defineMetadata); - /** - * Gets a value indicating whether the target object or its prototype chain has the provided metadata key defined. - * @param metadataKey A key used to store and retrieve metadata. - * @param target The target object on which the metadata is defined. - * @param propertyKey (Optional) The property key for the target. - * @returns `true` if the metadata key was defined on the target object or its prototype chain; otherwise, `false`. 
- * @example - * - * class Example { - * // property declarations are not part of ES6, though they are valid in TypeScript: - * // static staticProperty; - * // property; - * - * constructor(p) { } - * static staticMethod(p) { } - * method(p) { } - * } - * - * // constructor - * result = Reflect.hasMetadata("custom:annotation", Example); - * - * // property (on constructor) - * result = Reflect.hasMetadata("custom:annotation", Example, "staticProperty"); - * - * // property (on prototype) - * result = Reflect.hasMetadata("custom:annotation", Example.prototype, "property"); - * - * // method (on constructor) - * result = Reflect.hasMetadata("custom:annotation", Example, "staticMethod"); - * - * // method (on prototype) - * result = Reflect.hasMetadata("custom:annotation", Example.prototype, "method"); - * - */ - function hasMetadata(metadataKey, target, propertyKey) { - if (!IsObject(target)) - throw new TypeError(); - if (!IsUndefined(propertyKey)) - propertyKey = ToPropertyKey(propertyKey); - return OrdinaryHasMetadata(metadataKey, target, propertyKey); - } - exporter("hasMetadata", hasMetadata); - /** - * Gets a value indicating whether the target object has the provided metadata key defined. - * @param metadataKey A key used to store and retrieve metadata. - * @param target The target object on which the metadata is defined. - * @param propertyKey (Optional) The property key for the target. - * @returns `true` if the metadata key was defined on the target object; otherwise, `false`. - * @example - * - * class Example { - * // property declarations are not part of ES6, though they are valid in TypeScript: - * // static staticProperty; - * // property; - * - * constructor(p) { } - * static staticMethod(p) { } - * method(p) { } - * } - * - * // constructor - * result = Reflect.hasOwnMetadata("custom:annotation", Example); - * - * // property (on constructor) - * result = Reflect.hasOwnMetadata("custom:annotation", Example, "staticProperty"); - * - * // property (on prototype) - * result = Reflect.hasOwnMetadata("custom:annotation", Example.prototype, "property"); - * - * // method (on constructor) - * result = Reflect.hasOwnMetadata("custom:annotation", Example, "staticMethod"); - * - * // method (on prototype) - * result = Reflect.hasOwnMetadata("custom:annotation", Example.prototype, "method"); - * - */ - function hasOwnMetadata(metadataKey, target, propertyKey) { - if (!IsObject(target)) - throw new TypeError(); - if (!IsUndefined(propertyKey)) - propertyKey = ToPropertyKey(propertyKey); - return OrdinaryHasOwnMetadata(metadataKey, target, propertyKey); - } - exporter("hasOwnMetadata", hasOwnMetadata); - /** - * Gets the metadata value for the provided metadata key on the target object or its prototype chain. - * @param metadataKey A key used to store and retrieve metadata. - * @param target The target object on which the metadata is defined. - * @param propertyKey (Optional) The property key for the target. - * @returns The metadata value for the metadata key if found; otherwise, `undefined`. 
- * @example - * - * class Example { - * // property declarations are not part of ES6, though they are valid in TypeScript: - * // static staticProperty; - * // property; - * - * constructor(p) { } - * static staticMethod(p) { } - * method(p) { } - * } - * - * // constructor - * result = Reflect.getMetadata("custom:annotation", Example); - * - * // property (on constructor) - * result = Reflect.getMetadata("custom:annotation", Example, "staticProperty"); - * - * // property (on prototype) - * result = Reflect.getMetadata("custom:annotation", Example.prototype, "property"); - * - * // method (on constructor) - * result = Reflect.getMetadata("custom:annotation", Example, "staticMethod"); - * - * // method (on prototype) - * result = Reflect.getMetadata("custom:annotation", Example.prototype, "method"); - * - */ - function getMetadata(metadataKey, target, propertyKey) { - if (!IsObject(target)) - throw new TypeError(); - if (!IsUndefined(propertyKey)) - propertyKey = ToPropertyKey(propertyKey); - return OrdinaryGetMetadata(metadataKey, target, propertyKey); - } - exporter("getMetadata", getMetadata); - /** - * Gets the metadata value for the provided metadata key on the target object. - * @param metadataKey A key used to store and retrieve metadata. - * @param target The target object on which the metadata is defined. - * @param propertyKey (Optional) The property key for the target. - * @returns The metadata value for the metadata key if found; otherwise, `undefined`. - * @example - * - * class Example { - * // property declarations are not part of ES6, though they are valid in TypeScript: - * // static staticProperty; - * // property; - * - * constructor(p) { } - * static staticMethod(p) { } - * method(p) { } - * } - * - * // constructor - * result = Reflect.getOwnMetadata("custom:annotation", Example); - * - * // property (on constructor) - * result = Reflect.getOwnMetadata("custom:annotation", Example, "staticProperty"); - * - * // property (on prototype) - * result = Reflect.getOwnMetadata("custom:annotation", Example.prototype, "property"); - * - * // method (on constructor) - * result = Reflect.getOwnMetadata("custom:annotation", Example, "staticMethod"); - * - * // method (on prototype) - * result = Reflect.getOwnMetadata("custom:annotation", Example.prototype, "method"); - * - */ - function getOwnMetadata(metadataKey, target, propertyKey) { - if (!IsObject(target)) - throw new TypeError(); - if (!IsUndefined(propertyKey)) - propertyKey = ToPropertyKey(propertyKey); - return OrdinaryGetOwnMetadata(metadataKey, target, propertyKey); - } - exporter("getOwnMetadata", getOwnMetadata); - /** - * Gets the metadata keys defined on the target object or its prototype chain. - * @param target The target object on which the metadata is defined. - * @param propertyKey (Optional) The property key for the target. - * @returns An array of unique metadata keys. 
- * @example - * - * class Example { - * // property declarations are not part of ES6, though they are valid in TypeScript: - * // static staticProperty; - * // property; - * - * constructor(p) { } - * static staticMethod(p) { } - * method(p) { } - * } - * - * // constructor - * result = Reflect.getMetadataKeys(Example); - * - * // property (on constructor) - * result = Reflect.getMetadataKeys(Example, "staticProperty"); - * - * // property (on prototype) - * result = Reflect.getMetadataKeys(Example.prototype, "property"); - * - * // method (on constructor) - * result = Reflect.getMetadataKeys(Example, "staticMethod"); - * - * // method (on prototype) - * result = Reflect.getMetadataKeys(Example.prototype, "method"); - * - */ - function getMetadataKeys(target, propertyKey) { - if (!IsObject(target)) - throw new TypeError(); - if (!IsUndefined(propertyKey)) - propertyKey = ToPropertyKey(propertyKey); - return OrdinaryMetadataKeys(target, propertyKey); - } - exporter("getMetadataKeys", getMetadataKeys); - /** - * Gets the unique metadata keys defined on the target object. - * @param target The target object on which the metadata is defined. - * @param propertyKey (Optional) The property key for the target. - * @returns An array of unique metadata keys. - * @example - * - * class Example { - * // property declarations are not part of ES6, though they are valid in TypeScript: - * // static staticProperty; - * // property; - * - * constructor(p) { } - * static staticMethod(p) { } - * method(p) { } - * } - * - * // constructor - * result = Reflect.getOwnMetadataKeys(Example); - * - * // property (on constructor) - * result = Reflect.getOwnMetadataKeys(Example, "staticProperty"); - * - * // property (on prototype) - * result = Reflect.getOwnMetadataKeys(Example.prototype, "property"); - * - * // method (on constructor) - * result = Reflect.getOwnMetadataKeys(Example, "staticMethod"); - * - * // method (on prototype) - * result = Reflect.getOwnMetadataKeys(Example.prototype, "method"); - * - */ - function getOwnMetadataKeys(target, propertyKey) { - if (!IsObject(target)) - throw new TypeError(); - if (!IsUndefined(propertyKey)) - propertyKey = ToPropertyKey(propertyKey); - return OrdinaryOwnMetadataKeys(target, propertyKey); - } - exporter("getOwnMetadataKeys", getOwnMetadataKeys); - /** - * Deletes the metadata entry from the target object with the provided key. - * @param metadataKey A key used to store and retrieve metadata. - * @param target The target object on which the metadata is defined. - * @param propertyKey (Optional) The property key for the target. - * @returns `true` if the metadata entry was found and deleted; otherwise, false. 
- * @example - * - * class Example { - * // property declarations are not part of ES6, though they are valid in TypeScript: - * // static staticProperty; - * // property; - * - * constructor(p) { } - * static staticMethod(p) { } - * method(p) { } - * } - * - * // constructor - * result = Reflect.deleteMetadata("custom:annotation", Example); - * - * // property (on constructor) - * result = Reflect.deleteMetadata("custom:annotation", Example, "staticProperty"); - * - * // property (on prototype) - * result = Reflect.deleteMetadata("custom:annotation", Example.prototype, "property"); - * - * // method (on constructor) - * result = Reflect.deleteMetadata("custom:annotation", Example, "staticMethod"); - * - * // method (on prototype) - * result = Reflect.deleteMetadata("custom:annotation", Example.prototype, "method"); - * - */ - function deleteMetadata(metadataKey, target, propertyKey) { - if (!IsObject(target)) - throw new TypeError(); - if (!IsUndefined(propertyKey)) - propertyKey = ToPropertyKey(propertyKey); - var metadataMap = GetOrCreateMetadataMap(target, propertyKey, /*Create*/ false); - if (IsUndefined(metadataMap)) - return false; - if (!metadataMap.delete(metadataKey)) - return false; - if (metadataMap.size > 0) - return true; - var targetMetadata = Metadata.get(target); - targetMetadata.delete(propertyKey); - if (targetMetadata.size > 0) - return true; - Metadata.delete(target); - return true; - } - exporter("deleteMetadata", deleteMetadata); - function DecorateConstructor(decorators, target) { - for (var i = decorators.length - 1; i >= 0; --i) { - var decorator = decorators[i]; - var decorated = decorator(target); - if (!IsUndefined(decorated) && !IsNull(decorated)) { - if (!IsConstructor(decorated)) - throw new TypeError(); - target = decorated; - } - } - return target; - } - function DecorateProperty(decorators, target, propertyKey, descriptor) { - for (var i = decorators.length - 1; i >= 0; --i) { - var decorator = decorators[i]; - var decorated = decorator(target, propertyKey, descriptor); - if (!IsUndefined(decorated) && !IsNull(decorated)) { - if (!IsObject(decorated)) - throw new TypeError(); - descriptor = decorated; - } - } - return descriptor; - } - function GetOrCreateMetadataMap(O, P, Create) { - var targetMetadata = Metadata.get(O); - if (IsUndefined(targetMetadata)) { - if (!Create) - return undefined; - targetMetadata = new _Map(); - Metadata.set(O, targetMetadata); - } - var metadataMap = targetMetadata.get(P); - if (IsUndefined(metadataMap)) { - if (!Create) - return undefined; - metadataMap = new _Map(); - targetMetadata.set(P, metadataMap); - } - return metadataMap; - } - // 3.1.1.1 OrdinaryHasMetadata(MetadataKey, O, P) - // https://rbuckton.github.io/reflect-metadata/#ordinaryhasmetadata - function OrdinaryHasMetadata(MetadataKey, O, P) { - var hasOwn = OrdinaryHasOwnMetadata(MetadataKey, O, P); - if (hasOwn) - return true; - var parent = OrdinaryGetPrototypeOf(O); - if (!IsNull(parent)) - return OrdinaryHasMetadata(MetadataKey, parent, P); - return false; - } - // 3.1.2.1 OrdinaryHasOwnMetadata(MetadataKey, O, P) - // https://rbuckton.github.io/reflect-metadata/#ordinaryhasownmetadata - function OrdinaryHasOwnMetadata(MetadataKey, O, P) { - var metadataMap = GetOrCreateMetadataMap(O, P, /*Create*/ false); - if (IsUndefined(metadataMap)) - return false; - return ToBoolean(metadataMap.has(MetadataKey)); - } - // 3.1.3.1 OrdinaryGetMetadata(MetadataKey, O, P) - // https://rbuckton.github.io/reflect-metadata/#ordinarygetmetadata - function 
OrdinaryGetMetadata(MetadataKey, O, P) { - var hasOwn = OrdinaryHasOwnMetadata(MetadataKey, O, P); - if (hasOwn) - return OrdinaryGetOwnMetadata(MetadataKey, O, P); - var parent = OrdinaryGetPrototypeOf(O); - if (!IsNull(parent)) - return OrdinaryGetMetadata(MetadataKey, parent, P); - return undefined; - } - // 3.1.4.1 OrdinaryGetOwnMetadata(MetadataKey, O, P) - // https://rbuckton.github.io/reflect-metadata/#ordinarygetownmetadata - function OrdinaryGetOwnMetadata(MetadataKey, O, P) { - var metadataMap = GetOrCreateMetadataMap(O, P, /*Create*/ false); - if (IsUndefined(metadataMap)) - return undefined; - return metadataMap.get(MetadataKey); - } - // 3.1.5.1 OrdinaryDefineOwnMetadata(MetadataKey, MetadataValue, O, P) - // https://rbuckton.github.io/reflect-metadata/#ordinarydefineownmetadata - function OrdinaryDefineOwnMetadata(MetadataKey, MetadataValue, O, P) { - var metadataMap = GetOrCreateMetadataMap(O, P, /*Create*/ true); - metadataMap.set(MetadataKey, MetadataValue); - } - // 3.1.6.1 OrdinaryMetadataKeys(O, P) - // https://rbuckton.github.io/reflect-metadata/#ordinarymetadatakeys - function OrdinaryMetadataKeys(O, P) { - var ownKeys = OrdinaryOwnMetadataKeys(O, P); - var parent = OrdinaryGetPrototypeOf(O); - if (parent === null) - return ownKeys; - var parentKeys = OrdinaryMetadataKeys(parent, P); - if (parentKeys.length <= 0) - return ownKeys; - if (ownKeys.length <= 0) - return parentKeys; - var set = new _Set(); - var keys = []; - for (var _i = 0, ownKeys_1 = ownKeys; _i < ownKeys_1.length; _i++) { - var key = ownKeys_1[_i]; - var hasKey = set.has(key); - if (!hasKey) { - set.add(key); - keys.push(key); - } - } - for (var _a = 0, parentKeys_1 = parentKeys; _a < parentKeys_1.length; _a++) { - var key = parentKeys_1[_a]; - var hasKey = set.has(key); - if (!hasKey) { - set.add(key); - keys.push(key); - } - } - return keys; - } - // 3.1.7.1 OrdinaryOwnMetadataKeys(O, P) - // https://rbuckton.github.io/reflect-metadata/#ordinaryownmetadatakeys - function OrdinaryOwnMetadataKeys(O, P) { - var keys = []; - var metadataMap = GetOrCreateMetadataMap(O, P, /*Create*/ false); - if (IsUndefined(metadataMap)) - return keys; - var keysObj = metadataMap.keys(); - var iterator = GetIterator(keysObj); - var k = 0; - while (true) { - var next = IteratorStep(iterator); - if (!next) { - keys.length = k; - return keys; - } - var nextValue = IteratorValue(next); - try { - keys[k] = nextValue; - } - catch (e) { - try { - IteratorClose(iterator); - } - finally { - throw e; - } - } - k++; - } - } - // 6 ECMAScript Data Typ0es and Values - // https://tc39.github.io/ecma262/#sec-ecmascript-data-types-and-values - function Type(x) { - if (x === null) - return 1 /* Null */; - switch (typeof x) { - case "undefined": return 0 /* Undefined */; - case "boolean": return 2 /* Boolean */; - case "string": return 3 /* String */; - case "symbol": return 4 /* Symbol */; - case "number": return 5 /* Number */; - case "object": return x === null ? 
1 /* Null */ : 6 /* Object */; - default: return 6 /* Object */; - } - } - // 6.1.1 The Undefined Type - // https://tc39.github.io/ecma262/#sec-ecmascript-language-types-undefined-type - function IsUndefined(x) { - return x === undefined; - } - // 6.1.2 The Null Type - // https://tc39.github.io/ecma262/#sec-ecmascript-language-types-null-type - function IsNull(x) { - return x === null; - } - // 6.1.5 The Symbol Type - // https://tc39.github.io/ecma262/#sec-ecmascript-language-types-symbol-type - function IsSymbol(x) { - return typeof x === "symbol"; - } - // 6.1.7 The Object Type - // https://tc39.github.io/ecma262/#sec-object-type - function IsObject(x) { - return typeof x === "object" ? x !== null : typeof x === "function"; - } - // 7.1 Type Conversion - // https://tc39.github.io/ecma262/#sec-type-conversion - // 7.1.1 ToPrimitive(input [, PreferredType]) - // https://tc39.github.io/ecma262/#sec-toprimitive - function ToPrimitive(input, PreferredType) { - switch (Type(input)) { - case 0 /* Undefined */: return input; - case 1 /* Null */: return input; - case 2 /* Boolean */: return input; - case 3 /* String */: return input; - case 4 /* Symbol */: return input; - case 5 /* Number */: return input; - } - var hint = PreferredType === 3 /* String */ ? "string" : PreferredType === 5 /* Number */ ? "number" : "default"; - var exoticToPrim = GetMethod(input, toPrimitiveSymbol); - if (exoticToPrim !== undefined) { - var result = exoticToPrim.call(input, hint); - if (IsObject(result)) - throw new TypeError(); - return result; - } - return OrdinaryToPrimitive(input, hint === "default" ? "number" : hint); - } - // 7.1.1.1 OrdinaryToPrimitive(O, hint) - // https://tc39.github.io/ecma262/#sec-ordinarytoprimitive - function OrdinaryToPrimitive(O, hint) { - if (hint === "string") { - var toString_1 = O.toString; - if (IsCallable(toString_1)) { - var result = toString_1.call(O); - if (!IsObject(result)) - return result; - } - var valueOf = O.valueOf; - if (IsCallable(valueOf)) { - var result = valueOf.call(O); - if (!IsObject(result)) - return result; - } - } - else { - var valueOf = O.valueOf; - if (IsCallable(valueOf)) { - var result = valueOf.call(O); - if (!IsObject(result)) - return result; - } - var toString_2 = O.toString; - if (IsCallable(toString_2)) { - var result = toString_2.call(O); - if (!IsObject(result)) - return result; - } - } - throw new TypeError(); - } - // 7.1.2 ToBoolean(argument) - // https://tc39.github.io/ecma262/2016/#sec-toboolean - function ToBoolean(argument) { - return !!argument; - } - // 7.1.12 ToString(argument) - // https://tc39.github.io/ecma262/#sec-tostring - function ToString(argument) { - return "" + argument; - } - // 7.1.14 ToPropertyKey(argument) - // https://tc39.github.io/ecma262/#sec-topropertykey - function ToPropertyKey(argument) { - var key = ToPrimitive(argument, 3 /* String */); - if (IsSymbol(key)) - return key; - return ToString(key); - } - // 7.2 Testing and Comparison Operations - // https://tc39.github.io/ecma262/#sec-testing-and-comparison-operations - // 7.2.2 IsArray(argument) - // https://tc39.github.io/ecma262/#sec-isarray - function IsArray(argument) { - return Array.isArray - ? Array.isArray(argument) - : argument instanceof Object - ? 
argument instanceof Array - : Object.prototype.toString.call(argument) === "[object Array]"; + if (!self.method) { + self.method = options.method || 'GET' + } + if (!self.localAddress) { + self.localAddress = options.localAddress + } + + self._qs.init(options) + + debug(options) + if (!self.pool && self.pool !== false) { + self.pool = globalPool + } + self.dests = self.dests || [] + self.__isRequestRequest = true + + // Protect against double callback + if (!self._callback && self.callback) { + self._callback = self.callback + self.callback = function () { + if (self._callbackCalled) { + return // Print a warning maybe? + } + self._callbackCalled = true + self._callback.apply(self, arguments) + } + self.on('error', self.callback.bind()) + self.on('complete', self.callback.bind(self, null)) + } + + // People use this property instead all the time, so support it + if (!self.uri && self.url) { + self.uri = self.url + delete self.url + } + + // If there's a baseUrl, then use it as the base URL (i.e. uri must be + // specified as a relative path and is appended to baseUrl). + if (self.baseUrl) { + if (typeof self.baseUrl !== 'string') { + return self.emit('error', new Error('options.baseUrl must be a string')) + } + + if (typeof self.uri !== 'string') { + return self.emit('error', new Error('options.uri must be a string when using options.baseUrl')) + } + + if (self.uri.indexOf('//') === 0 || self.uri.indexOf('://') !== -1) { + return self.emit('error', new Error('options.uri must be a path when using options.baseUrl')) + } + + // Handle all cases to make sure that there's only one slash between + // baseUrl and uri. + var baseUrlEndsWithSlash = self.baseUrl.lastIndexOf('/') === self.baseUrl.length - 1 + var uriStartsWithSlash = self.uri.indexOf('/') === 0 + + if (baseUrlEndsWithSlash && uriStartsWithSlash) { + self.uri = self.baseUrl + self.uri.slice(1) + } else if (baseUrlEndsWithSlash || uriStartsWithSlash) { + self.uri = self.baseUrl + self.uri + } else if (self.uri === '') { + self.uri = self.baseUrl + } else { + self.uri = self.baseUrl + '/' + self.uri + } + delete self.baseUrl + } + + // A URI is needed by this point, emit error if we haven't been able to get one + if (!self.uri) { + return self.emit('error', new Error('options.uri is a required argument')) + } + + // If a string URI/URL was given, parse it into a URL object + if (typeof self.uri === 'string') { + self.uri = url.parse(self.uri) + } + + // Some URL objects are not from a URL parsed string and need href added + if (!self.uri.href) { + self.uri.href = url.format(self.uri) + } + + // DEPRECATED: Warning for users of the old Unix Sockets URL Scheme + if (self.uri.protocol === 'unix:') { + return self.emit('error', new Error('`unix://` URL scheme is no longer supported. Please use the format `http://unix:SOCKET:PATH`')) + } + + // Support Unix Sockets + if (self.uri.host === 'unix') { + self.enableUnixSocket() + } + + if (self.strictSSL === false) { + self.rejectUnauthorized = false + } + + if (!self.uri.pathname) { self.uri.pathname = '/' } + + if (!(self.uri.host || (self.uri.hostname && self.uri.port)) && !self.uri.isUnix) { + // Invalid URI: it may generate lot of bad errors, like 'TypeError: Cannot call method `indexOf` of undefined' in CookieJar + // Detect and reject it as soon as possible + var faultyUri = url.format(self.uri) + var message = 'Invalid URI "' + faultyUri + '"' + if (Object.keys(options).length === 0) { + // No option ? 
This can be the sign of a redirect + // As this is a case where the user cannot do anything (they didn't call request directly with this URL) + // they should be warned that it can be caused by a redirection (can save some hair) + message += '. This can be caused by a crappy redirection.' + } + // This error was fatal + self.abort() + return self.emit('error', new Error(message)) + } + + if (!self.hasOwnProperty('proxy')) { + self.proxy = getProxyFromURI(self.uri) + } + + self.tunnel = self._tunnel.isEnabled() + if (self.proxy) { + self._tunnel.setup(options) + } + + self._redirect.onRequest(options) + + self.setHost = false + if (!self.hasHeader('host')) { + var hostHeaderName = self.originalHostHeaderName || 'host' + self.setHeader(hostHeaderName, self.uri.host) + // Drop :port suffix from Host header if known protocol. + if (self.uri.port) { + if ((self.uri.port === '80' && self.uri.protocol === 'http:') || + (self.uri.port === '443' && self.uri.protocol === 'https:')) { + self.setHeader(hostHeaderName, self.uri.hostname) + } + } + self.setHost = true + } + + self.jar(self._jar || options.jar) + + if (!self.uri.port) { + if (self.uri.protocol === 'http:') { self.uri.port = 80 } else if (self.uri.protocol === 'https:') { self.uri.port = 443 } + } + + if (self.proxy && !self.tunnel) { + self.port = self.proxy.port + self.host = self.proxy.hostname + } else { + self.port = self.uri.port + self.host = self.uri.hostname + } + + if (options.form) { + self.form(options.form) + } + + if (options.formData) { + var formData = options.formData + var requestForm = self.form() + var appendFormValue = function (key, value) { + if (value && value.hasOwnProperty('value') && value.hasOwnProperty('options')) { + requestForm.append(key, value.value, value.options) + } else { + requestForm.append(key, value) + } + } + for (var formKey in formData) { + if (formData.hasOwnProperty(formKey)) { + var formValue = formData[formKey] + if (formValue instanceof Array) { + for (var j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]) + } + } else { + appendFormValue(formKey, formValue) } - // 7.2.3 IsCallable(argument) - // https://tc39.github.io/ecma262/#sec-iscallable - function IsCallable(argument) { - // NOTE: This is an approximation as we cannot check for [[Call]] internal method. 
- return typeof argument === "function"; + } + } + } + + if (options.qs) { + self.qs(options.qs) + } + + if (self.uri.path) { + self.path = self.uri.path + } else { + self.path = self.uri.pathname + (self.uri.search || '') + } + + if (self.path.length === 0) { + self.path = '/' + } + + // Auth must happen last in case signing is dependent on other headers + if (options.aws) { + self.aws(options.aws) + } + + if (options.hawk) { + self.hawk(options.hawk) + } + + if (options.httpSignature) { + self.httpSignature(options.httpSignature) + } + + if (options.auth) { + if (Object.prototype.hasOwnProperty.call(options.auth, 'username')) { + options.auth.user = options.auth.username + } + if (Object.prototype.hasOwnProperty.call(options.auth, 'password')) { + options.auth.pass = options.auth.password + } + + self.auth( + options.auth.user, + options.auth.pass, + options.auth.sendImmediately, + options.auth.bearer + ) + } + + if (self.gzip && !self.hasHeader('accept-encoding')) { + self.setHeader('accept-encoding', 'gzip, deflate') + } + + if (self.uri.auth && !self.hasHeader('authorization')) { + var uriAuthPieces = self.uri.auth.split(':').map(function (item) { return self._qs.unescape(item) }) + self.auth(uriAuthPieces[0], uriAuthPieces.slice(1).join(':'), true) + } + + if (!self.tunnel && self.proxy && self.proxy.auth && !self.hasHeader('proxy-authorization')) { + var proxyAuthPieces = self.proxy.auth.split(':').map(function (item) { return self._qs.unescape(item) }) + var authHeader = 'Basic ' + toBase64(proxyAuthPieces.join(':')) + self.setHeader('proxy-authorization', authHeader) + } + + if (self.proxy && !self.tunnel) { + self.path = (self.uri.protocol + '//' + self.uri.host + self.path) + } + + if (options.json) { + self.json(options.json) + } + if (options.multipart) { + self.multipart(options.multipart) + } + + if (options.time) { + self.timing = true + + // NOTE: elapsedTime is deprecated in favor of .timings + self.elapsedTime = self.elapsedTime || 0 + } + + function setContentLength () { + if (isTypedArray(self.body)) { + self.body = Buffer.from(self.body) + } + + if (!self.hasHeader('content-length')) { + var length + if (typeof self.body === 'string') { + length = Buffer.byteLength(self.body) + } else if (Array.isArray(self.body)) { + length = self.body.reduce(function (a, b) { return a + b.length }, 0) + } else { + length = self.body.length + } + + if (length) { + self.setHeader('content-length', length) + } else { + self.emit('error', new Error('Argument error, options.body.')) + } + } + } + if (self.body && !isstream(self.body)) { + setContentLength() + } + + if (options.oauth) { + self.oauth(options.oauth) + } else if (self._oauth.params && self.hasHeader('authorization')) { + self.oauth(self._oauth.params) + } + + var protocol = self.proxy && !self.tunnel ? self.proxy.protocol : self.uri.protocol + var defaultModules = {'http:': http, 'https:': https} + var httpModules = self.httpModules || {} + + self.httpModule = httpModules[protocol] || defaultModules[protocol] + + if (!self.httpModule) { + return self.emit('error', new Error('Invalid protocol: ' + protocol)) + } + + if (options.ca) { + self.ca = options.ca + } + + if (!self.agent) { + if (options.agentOptions) { + self.agentOptions = options.agentOptions + } + + if (options.agentClass) { + self.agentClass = options.agentClass + } else if (options.forever) { + var v = version() + // use ForeverAgent in node 0.10- only + if (v.major === 0 && v.minor <= 10) { + self.agentClass = protocol === 'http:' ? 
ForeverAgent : ForeverAgent.SSL + } else { + self.agentClass = self.httpModule.Agent + self.agentOptions = self.agentOptions || {} + self.agentOptions.keepAlive = true + } + } else { + self.agentClass = self.httpModule.Agent + } + } + + if (self.pool === false) { + self.agent = false + } else { + self.agent = self.agent || self.getNewAgent() + } + + self.on('pipe', function (src) { + if (self.ntick && self._started) { + self.emit('error', new Error('You cannot pipe to this stream after the outbound request has started.')) + } + self.src = src + if (isReadStream(src)) { + if (!self.hasHeader('content-type')) { + self.setHeader('content-type', mime.lookup(src.path)) + } + } else { + if (src.headers) { + for (var i in src.headers) { + if (!self.hasHeader(i)) { + self.setHeader(i, src.headers[i]) + } } - // 7.2.4 IsConstructor(argument) - // https://tc39.github.io/ecma262/#sec-isconstructor - function IsConstructor(argument) { - // NOTE: This is an approximation as we cannot check for [[Construct]] internal method. - return typeof argument === "function"; + } + if (self._json && !self.hasHeader('content-type')) { + self.setHeader('content-type', 'application/json') + } + if (src.method && !self.explicitMethod) { + self.method = src.method + } + } + + // self.on('pipe', function () { + // console.error('You have already piped to this stream. Pipeing twice is likely to break the request.') + // }) + }) + + defer(function () { + if (self._aborted) { + return + } + + var end = function () { + if (self._form) { + if (!self._auth.hasAuth) { + self._form.pipe(self) + } else if (self._auth.hasAuth && self._auth.sentAuth) { + self._form.pipe(self) } - // 7.2.7 IsPropertyKey(argument) - // https://tc39.github.io/ecma262/#sec-ispropertykey - function IsPropertyKey(argument) { - switch (Type(argument)) { - case 3 /* String */: return true; - case 4 /* Symbol */: return true; - default: return false; - } + } + if (self._multipart && self._multipart.chunked) { + self._multipart.body.pipe(self) + } + if (self.body) { + if (isstream(self.body)) { + self.body.pipe(self) + } else { + setContentLength() + if (Array.isArray(self.body)) { + self.body.forEach(function (part) { + self.write(part) + }) + } else { + self.write(self.body) + } + self.end() } - // 7.3 Operations on Objects - // https://tc39.github.io/ecma262/#sec-operations-on-objects - // 7.3.9 GetMethod(V, P) - // https://tc39.github.io/ecma262/#sec-getmethod - function GetMethod(V, P) { - var func = V[P]; - if (func === undefined || func === null) - return undefined; - if (!IsCallable(func)) - throw new TypeError(); - return func; + } else if (self.requestBodyStream) { + console.warn('options.requestBodyStream is deprecated, please pass the request object to stream.pipe.') + self.requestBodyStream.pipe(self) + } else if (!self.src) { + if (self._auth.hasAuth && !self._auth.sentAuth) { + self.end() + return } - // 7.4 Operations on Iterator Objects - // https://tc39.github.io/ecma262/#sec-operations-on-iterator-objects - function GetIterator(obj) { - var method = GetMethod(obj, iteratorSymbol); - if (!IsCallable(method)) - throw new TypeError(); // from Call - var iterator = method.call(obj); - if (!IsObject(iterator)) - throw new TypeError(); - return iterator; + if (self.method !== 'GET' && typeof self.method !== 'undefined') { + self.setHeader('content-length', 0) } - // 7.4.4 IteratorValue(iterResult) - // https://tc39.github.io/ecma262/2016/#sec-iteratorvalue - function IteratorValue(iterResult) { - return iterResult.value; + self.end() + } + } + 
+ if (self._form && !self.hasHeader('content-length')) { + // Before ending the request, we had to compute the length of the whole form, asyncly + self.setHeader(self._form.getHeaders(), true) + self._form.getLength(function (err, length) { + if (!err && !isNaN(length)) { + self.setHeader('content-length', length) } - // 7.4.5 IteratorStep(iterator) - // https://tc39.github.io/ecma262/#sec-iteratorstep - function IteratorStep(iterator) { - var result = iterator.next(); - return result.done ? false : result; + end() + }) + } else { + end() + } + + self.ntick = true + }) +} + +Request.prototype.getNewAgent = function () { + var self = this + var Agent = self.agentClass + var options = {} + if (self.agentOptions) { + for (var i in self.agentOptions) { + options[i] = self.agentOptions[i] + } + } + if (self.ca) { + options.ca = self.ca + } + if (self.ciphers) { + options.ciphers = self.ciphers + } + if (self.secureProtocol) { + options.secureProtocol = self.secureProtocol + } + if (self.secureOptions) { + options.secureOptions = self.secureOptions + } + if (typeof self.rejectUnauthorized !== 'undefined') { + options.rejectUnauthorized = self.rejectUnauthorized + } + + if (self.cert && self.key) { + options.key = self.key + options.cert = self.cert + } + + if (self.pfx) { + options.pfx = self.pfx + } + + if (self.passphrase) { + options.passphrase = self.passphrase + } + + var poolKey = '' + + // different types of agents are in different pools + if (Agent !== self.httpModule.Agent) { + poolKey += Agent.name + } + + // ca option is only relevant if proxy or destination are https + var proxy = self.proxy + if (typeof proxy === 'string') { + proxy = url.parse(proxy) + } + var isHttps = (proxy && proxy.protocol === 'https:') || this.uri.protocol === 'https:' + + if (isHttps) { + if (options.ca) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.ca + } + + if (typeof options.rejectUnauthorized !== 'undefined') { + if (poolKey) { + poolKey += ':' + } + poolKey += options.rejectUnauthorized + } + + if (options.cert) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.cert.toString('ascii') + options.key.toString('ascii') + } + + if (options.pfx) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.pfx.toString('ascii') + } + + if (options.ciphers) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.ciphers + } + + if (options.secureProtocol) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.secureProtocol + } + + if (options.secureOptions) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.secureOptions + } + } + + if (self.pool === globalPool && !poolKey && Object.keys(options).length === 0 && self.httpModule.globalAgent) { + // not doing anything special. Use the globalAgent + return self.httpModule.globalAgent + } + + // we're using a stored agent. Make sure it's protocol-specific + poolKey = self.uri.protocol + poolKey + + // generate a new agent for this setting if none yet exists + if (!self.pool[poolKey]) { + self.pool[poolKey] = new Agent(options) + // properly set maxSockets on new agents + if (self.pool.maxSockets) { + self.pool[poolKey].maxSockets = self.pool.maxSockets + } + } + + return self.pool[poolKey] +} + +Request.prototype.start = function () { + // start() is called once we are ready to send the outgoing HTTP request. + // this is usually called on the first write(), end() or on nextTick() + var self = this + + if (self.timing) { + // All timings will be relative to this request's startTime. 
In order to do this, + // we need to capture the wall-clock start time (via Date), immediately followed + // by the high-resolution timer (via now()). While these two won't be set + // at the _exact_ same time, they should be close enough to be able to calculate + // high-resolution, monotonically non-decreasing timestamps relative to startTime. + var startTime = new Date().getTime() + var startTimeNow = now() + } + + if (self._aborted) { + return + } + + self._started = true + self.method = self.method || 'GET' + self.href = self.uri.href + + if (self.src && self.src.stat && self.src.stat.size && !self.hasHeader('content-length')) { + self.setHeader('content-length', self.src.stat.size) + } + if (self._aws) { + self.aws(self._aws, true) + } + + // We have a method named auth, which is completely different from the http.request + // auth option. If we don't remove it, we're gonna have a bad time. + var reqOptions = copy(self) + delete reqOptions.auth + + debug('make request', self.uri.href) + + // node v6.8.0 now supports a `timeout` value in `http.request()`, but we + // should delete it for now since we handle timeouts manually for better + // consistency with node versions before v6.8.0 + delete reqOptions.timeout + + try { + self.req = self.httpModule.request(reqOptions) + } catch (err) { + self.emit('error', err) + return + } + + if (self.timing) { + self.startTime = startTime + self.startTimeNow = startTimeNow + + // Timing values will all be relative to startTime (by comparing to startTimeNow + // so we have an accurate clock) + self.timings = {} + } + + var timeout + if (self.timeout && !self.timeoutTimer) { + if (self.timeout < 0) { + timeout = 0 + } else if (typeof self.timeout === 'number' && isFinite(self.timeout)) { + timeout = self.timeout + } + } + + self.req.on('response', self.onRequestResponse.bind(self)) + self.req.on('error', self.onRequestError.bind(self)) + self.req.on('drain', function () { + self.emit('drain') + }) + + self.req.on('socket', function (socket) { + // `._connecting` was the old property which was made public in node v6.1.0 + var isConnecting = socket._connecting || socket.connecting + if (self.timing) { + self.timings.socket = now() - self.startTimeNow + + if (isConnecting) { + var onLookupTiming = function () { + self.timings.lookup = now() - self.startTimeNow } - // 7.4.6 IteratorClose(iterator, completion) - // https://tc39.github.io/ecma262/#sec-iteratorclose - function IteratorClose(iterator) { - var f = iterator["return"]; - if (f) - f.call(iterator); + + var onConnectTiming = function () { + self.timings.connect = now() - self.startTimeNow } - // 9.1 Ordinary Object Internal Methods and Internal Slots - // https://tc39.github.io/ecma262/#sec-ordinary-object-internal-methods-and-internal-slots - // 9.1.1.1 OrdinaryGetPrototypeOf(O) - // https://tc39.github.io/ecma262/#sec-ordinarygetprototypeof - function OrdinaryGetPrototypeOf(O) { - var proto = Object.getPrototypeOf(O); - if (typeof O !== "function" || O === functionPrototype) - return proto; - // TypeScript doesn't set __proto__ in ES5, as it's non-standard. - // Try to determine the superclass constructor. Compatible implementations - // must either set __proto__ on a subclass constructor to the superclass constructor, - // or ensure each class has a valid `constructor` property on its prototype that - // points back to the constructor. - // If this is not the same as Function.[[Prototype]], then this is definately inherited. 
- // This is the case when in ES6 or when using __proto__ in a compatible browser. - if (proto !== functionPrototype) - return proto; - // If the super prototype is Object.prototype, null, or undefined, then we cannot determine the heritage. - var prototype = O.prototype; - var prototypeProto = prototype && Object.getPrototypeOf(prototype); - if (prototypeProto == null || prototypeProto === Object.prototype) - return proto; - // If the constructor was not a function, then we cannot determine the heritage. - var constructor = prototypeProto.constructor; - if (typeof constructor !== "function") - return proto; - // If we have some kind of self-reference, then we cannot determine the heritage. - if (constructor === O) - return proto; - // we have a pretty good guess at the heritage. - return constructor; + + socket.once('lookup', onLookupTiming) + socket.once('connect', onConnectTiming) + + // clean up timing event listeners if needed on error + self.req.once('error', function () { + socket.removeListener('lookup', onLookupTiming) + socket.removeListener('connect', onConnectTiming) + }) + } + } + + var setReqTimeout = function () { + // This timeout sets the amount of time to wait *between* bytes sent + // from the server once connected. + // + // In particular, it's useful for erroring if the server fails to send + // data halfway through streaming a response. + self.req.setTimeout(timeout, function () { + if (self.req) { + self.abort() + var e = new Error('ESOCKETTIMEDOUT') + e.code = 'ESOCKETTIMEDOUT' + e.connect = false + self.emit('error', e) } - // naive Map shim - function CreateMapPolyfill() { - var cacheSentinel = {}; - var arraySentinel = []; - var MapIterator = /** @class */ (function () { - function MapIterator(keys, values, selector) { - this._index = 0; - this._keys = keys; - this._values = values; - this._selector = selector; - } - MapIterator.prototype["@@iterator"] = function () { return this; }; - MapIterator.prototype[iteratorSymbol] = function () { return this; }; - MapIterator.prototype.next = function () { - var index = this._index; - if (index >= 0 && index < this._keys.length) { - var result = this._selector(this._keys[index], this._values[index]); - if (index + 1 >= this._keys.length) { - this._index = -1; - this._keys = arraySentinel; - this._values = arraySentinel; - } - else { - this._index++; - } - return { value: result, done: false }; - } - return { value: undefined, done: true }; - }; - MapIterator.prototype.throw = function (error) { - if (this._index >= 0) { - this._index = -1; - this._keys = arraySentinel; - this._values = arraySentinel; - } - throw error; - }; - MapIterator.prototype.return = function (value) { - if (this._index >= 0) { - this._index = -1; - this._keys = arraySentinel; - this._values = arraySentinel; - } - return { value: value, done: true }; - }; - return MapIterator; - }()); - return /** @class */ (function () { - function Map() { - this._keys = []; - this._values = []; - this._cacheKey = cacheSentinel; - this._cacheIndex = -2; - } - Object.defineProperty(Map.prototype, "size", { - get: function () { return this._keys.length; }, - enumerable: true, - configurable: true - }); - Map.prototype.has = function (key) { return this._find(key, /*insert*/ false) >= 0; }; - Map.prototype.get = function (key) { - var index = this._find(key, /*insert*/ false); - return index >= 0 ? 
this._values[index] : undefined; - }; - Map.prototype.set = function (key, value) { - var index = this._find(key, /*insert*/ true); - this._values[index] = value; - return this; - }; - Map.prototype.delete = function (key) { - var index = this._find(key, /*insert*/ false); - if (index >= 0) { - var size = this._keys.length; - for (var i = index + 1; i < size; i++) { - this._keys[i - 1] = this._keys[i]; - this._values[i - 1] = this._values[i]; - } - this._keys.length--; - this._values.length--; - if (key === this._cacheKey) { - this._cacheKey = cacheSentinel; - this._cacheIndex = -2; - } - return true; - } - return false; - }; - Map.prototype.clear = function () { - this._keys.length = 0; - this._values.length = 0; - this._cacheKey = cacheSentinel; - this._cacheIndex = -2; - }; - Map.prototype.keys = function () { return new MapIterator(this._keys, this._values, getKey); }; - Map.prototype.values = function () { return new MapIterator(this._keys, this._values, getValue); }; - Map.prototype.entries = function () { return new MapIterator(this._keys, this._values, getEntry); }; - Map.prototype["@@iterator"] = function () { return this.entries(); }; - Map.prototype[iteratorSymbol] = function () { return this.entries(); }; - Map.prototype._find = function (key, insert) { - if (this._cacheKey !== key) { - this._cacheIndex = this._keys.indexOf(this._cacheKey = key); - } - if (this._cacheIndex < 0 && insert) { - this._cacheIndex = this._keys.length; - this._keys.push(key); - this._values.push(undefined); - } - return this._cacheIndex; - }; - return Map; - }()); - function getKey(key, _) { - return key; - } - function getValue(_, value) { - return value; - } - function getEntry(key, value) { - return [key, value]; - } + }) + } + if (timeout !== undefined) { + // Only start the connection timer if we're actually connecting a new + // socket, otherwise if we're already connected (because this is a + // keep-alive connection) do not bother. This is important since we won't + // get a 'connect' event for an already connected socket. + if (isConnecting) { + var onReqSockConnect = function () { + socket.removeListener('connect', onReqSockConnect) + self.clearTimeout() + setReqTimeout() } - // naive Set shim - function CreateSetPolyfill() { - return /** @class */ (function () { - function Set() { - this._map = new _Map(); - } - Object.defineProperty(Set.prototype, "size", { - get: function () { return this._map.size; }, - enumerable: true, - configurable: true - }); - Set.prototype.has = function (value) { return this._map.has(value); }; - Set.prototype.add = function (value) { return this._map.set(value, value), this; }; - Set.prototype.delete = function (value) { return this._map.delete(value); }; - Set.prototype.clear = function () { this._map.clear(); }; - Set.prototype.keys = function () { return this._map.keys(); }; - Set.prototype.values = function () { return this._map.values(); }; - Set.prototype.entries = function () { return this._map.entries(); }; - Set.prototype["@@iterator"] = function () { return this.keys(); }; - Set.prototype[iteratorSymbol] = function () { return this.keys(); }; - return Set; - }()); + + socket.on('connect', onReqSockConnect) + + self.req.on('error', function (err) { // eslint-disable-line handle-callback-err + socket.removeListener('connect', onReqSockConnect) + }) + + // Set a timeout in memory - this block will throw if the server takes more + // than `timeout` to write the HTTP status and headers (corresponding to + // the on('response') event on the client). 
NB: this measures wall-clock + // time, not the time between bytes sent by the server. + self.timeoutTimer = setTimeout(function () { + socket.removeListener('connect', onReqSockConnect) + self.abort() + var e = new Error('ETIMEDOUT') + e.code = 'ETIMEDOUT' + e.connect = true + self.emit('error', e) + }, timeout) + } else { + // We're already connected + setReqTimeout() + } + } + self.emit('socket', socket) + }) + + self.emit('request', self.req) +} + +Request.prototype.onRequestError = function (error) { + var self = this + if (self._aborted) { + return + } + if (self.req && self.req._reusedSocket && error.code === 'ECONNRESET' && + self.agent.addRequestNoreuse) { + self.agent = { addRequest: self.agent.addRequestNoreuse.bind(self.agent) } + self.start() + self.req.end() + return + } + self.clearTimeout() + self.emit('error', error) +} + +Request.prototype.onRequestResponse = function (response) { + var self = this + + if (self.timing) { + self.timings.response = now() - self.startTimeNow + } + + debug('onRequestResponse', self.uri.href, response.statusCode, response.headers) + response.on('end', function () { + if (self.timing) { + self.timings.end = now() - self.startTimeNow + response.timingStart = self.startTime + + // fill in the blanks for any periods that didn't trigger, such as + // no lookup or connect due to keep alive + if (!self.timings.socket) { + self.timings.socket = 0 + } + if (!self.timings.lookup) { + self.timings.lookup = self.timings.socket + } + if (!self.timings.connect) { + self.timings.connect = self.timings.lookup + } + if (!self.timings.response) { + self.timings.response = self.timings.connect + } + + debug('elapsed time', self.timings.end) + + // elapsedTime includes all redirects + self.elapsedTime += Math.round(self.timings.end) + + // NOTE: elapsedTime is deprecated in favor of .timings + response.elapsedTime = self.elapsedTime + + // timings is just for the final fetch + response.timings = self.timings + + // pre-calculate phase timings as well + response.timingPhases = { + wait: self.timings.socket, + dns: self.timings.lookup - self.timings.socket, + tcp: self.timings.connect - self.timings.lookup, + firstByte: self.timings.response - self.timings.connect, + download: self.timings.end - self.timings.response, + total: self.timings.end + } + } + debug('response end', self.uri.href, response.statusCode, response.headers) + }) + + if (self._aborted) { + debug('aborted', self.uri.href) + response.resume() + return + } + + self.response = response + response.request = self + response.toJSON = responseToJSON + + // XXX This is different on 0.10, because SSL is strict by default + if (self.httpModule === https && + self.strictSSL && (!response.hasOwnProperty('socket') || + !response.socket.authorized)) { + debug('strict ssl error', self.uri.href) + var sslErr = response.hasOwnProperty('socket') ? response.socket.authorizationError : self.uri.href + ' does not support SSL' + self.emit('error', new Error('SSL Error: ' + sslErr)) + return + } + + // Save the original host before any redirect (if it changes, we need to + // remove any authorization headers). Also remember the case of the header + // name because lots of broken servers expect Host instead of host and we + // want the caller to be able to specify this. 
+ self.originalHost = self.getHeader('host') + if (!self.originalHostHeaderName) { + self.originalHostHeaderName = self.hasHeader('host') + } + if (self.setHost) { + self.removeHeader('host') + } + self.clearTimeout() + + var targetCookieJar = (self._jar && self._jar.setCookie) ? self._jar : globalCookieJar + var addCookie = function (cookie) { + // set the cookie if it's domain in the href's domain. + try { + targetCookieJar.setCookie(cookie, self.uri.href, {ignoreError: true}) + } catch (e) { + self.emit('error', e) + } + } + + response.caseless = caseless(response.headers) + + if (response.caseless.has('set-cookie') && (!self._disableCookies)) { + var headerName = response.caseless.has('set-cookie') + if (Array.isArray(response.headers[headerName])) { + response.headers[headerName].forEach(addCookie) + } else { + addCookie(response.headers[headerName]) + } + } + + if (self._redirect.onResponse(response)) { + return // Ignore the rest of the response + } else { + // Be a good stream and emit end when the response is finished. + // Hack to emit end on close because of a core bug that never fires end + response.on('close', function () { + if (!self._ended) { + self.response.emit('end') + } + }) + + response.once('end', function () { + self._ended = true + }) + + var noBody = function (code) { + return ( + self.method === 'HEAD' || + // Informational + (code >= 100 && code < 200) || + // No Content + code === 204 || + // Not Modified + code === 304 + ) + } + + var responseContent + if (self.gzip && !noBody(response.statusCode)) { + var contentEncoding = response.headers['content-encoding'] || 'identity' + contentEncoding = contentEncoding.trim().toLowerCase() + + // Be more lenient with decoding compressed responses, since (very rarely) + // servers send slightly invalid gzip responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + var zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + } + + if (contentEncoding === 'gzip') { + responseContent = zlib.createGunzip(zlibOptions) + response.pipe(responseContent) + } else if (contentEncoding === 'deflate') { + responseContent = zlib.createInflate(zlibOptions) + response.pipe(responseContent) + } else { + // Since previous versions didn't check for Content-Encoding header, + // ignore any invalid values to preserve backwards-compatibility + if (contentEncoding !== 'identity') { + debug('ignoring unrecognized Content-Encoding ' + contentEncoding) } - // naive WeakMap shim - function CreateWeakMapPolyfill() { - var UUID_SIZE = 16; - var keys = HashMap.create(); - var rootKey = CreateUniqueKey(); - return /** @class */ (function () { - function WeakMap() { - this._key = CreateUniqueKey(); - } - WeakMap.prototype.has = function (target) { - var table = GetOrCreateWeakMapTable(target, /*create*/ false); - return table !== undefined ? HashMap.has(table, this._key) : false; - }; - WeakMap.prototype.get = function (target) { - var table = GetOrCreateWeakMapTable(target, /*create*/ false); - return table !== undefined ? HashMap.get(table, this._key) : undefined; - }; - WeakMap.prototype.set = function (target, value) { - var table = GetOrCreateWeakMapTable(target, /*create*/ true); - table[this._key] = value; - return this; - }; - WeakMap.prototype.delete = function (target) { - var table = GetOrCreateWeakMapTable(target, /*create*/ false); - return table !== undefined ? 
delete table[this._key] : false; - }; - WeakMap.prototype.clear = function () { - // NOTE: not a real clear, just makes the previous data unreachable - this._key = CreateUniqueKey(); - }; - return WeakMap; - }()); - function CreateUniqueKey() { - var key; - do - key = "@@WeakMap@@" + CreateUUID(); - while (HashMap.has(keys, key)); - keys[key] = true; - return key; - } - function GetOrCreateWeakMapTable(target, create) { - if (!hasOwn.call(target, rootKey)) { - if (!create) - return undefined; - Object.defineProperty(target, rootKey, { value: HashMap.create() }); - } - return target[rootKey]; - } - function FillRandomBytes(buffer, size) { - for (var i = 0; i < size; ++i) - buffer[i] = Math.random() * 0xff | 0; - return buffer; - } - function GenRandomBytes(size) { - if (typeof Uint8Array === "function") { - if (typeof crypto !== "undefined") - return crypto.getRandomValues(new Uint8Array(size)); - if (typeof msCrypto !== "undefined") - return msCrypto.getRandomValues(new Uint8Array(size)); - return FillRandomBytes(new Uint8Array(size), size); - } - return FillRandomBytes(new Array(size), size); - } - function CreateUUID() { - var data = GenRandomBytes(UUID_SIZE); - // mark as random - RFC 4122 § 4.4 - data[6] = data[6] & 0x4f | 0x40; - data[8] = data[8] & 0xbf | 0x80; - var result = ""; - for (var offset = 0; offset < UUID_SIZE; ++offset) { - var byte = data[offset]; - if (offset === 4 || offset === 6 || offset === 8) - result += "-"; - if (byte < 16) - result += "0"; - result += byte.toString(16).toLowerCase(); - } - return result; - } + responseContent = response + } + } else { + responseContent = response + } + + if (self.encoding) { + if (self.dests.length !== 0) { + console.error('Ignoring encoding parameter as this stream is being piped to another stream which makes the encoding option invalid.') + } else { + responseContent.setEncoding(self.encoding) + } + } + + if (self._paused) { + responseContent.pause() + } + + self.responseContent = responseContent + + self.emit('response', response) + + self.dests.forEach(function (dest) { + self.pipeDest(dest) + }) + + responseContent.on('data', function (chunk) { + if (self.timing && !self.responseStarted) { + self.responseStartTime = (new Date()).getTime() + + // NOTE: responseStartTime is deprecated in favor of .timings + response.responseStartTime = self.responseStartTime + } + self._destdata = true + self.emit('data', chunk) + }) + responseContent.once('end', function (chunk) { + self.emit('end', chunk) + }) + responseContent.on('error', function (error) { + self.emit('error', error) + }) + responseContent.on('close', function () { self.emit('close') }) + + if (self.callback) { + self.readResponseBody(response) + } else { // if no callback + self.on('end', function () { + if (self._aborted) { + debug('aborted', self.uri.href) + return } - // uses a heuristic used by v8 and chakra to force an object into dictionary mode. 
- function MakeDictionary(obj) { - obj.__ = undefined; - delete obj.__; - return obj; + self.emit('complete', response) + }) + } + } + debug('finish init function', self.uri.href) +} + +Request.prototype.readResponseBody = function (response) { + var self = this + debug("reading response's body") + var buffers = [] + var bufferLength = 0 + var strings = [] + + self.on('data', function (chunk) { + if (!Buffer.isBuffer(chunk)) { + strings.push(chunk) + } else if (chunk.length) { + bufferLength += chunk.length + buffers.push(chunk) + } + }) + self.on('end', function () { + debug('end event', self.uri.href) + if (self._aborted) { + debug('aborted', self.uri.href) + // `buffer` is defined in the parent scope and used in a closure it exists for the life of the request. + // This can lead to leaky behavior if the user retains a reference to the request object. + buffers = [] + bufferLength = 0 + return + } + + if (bufferLength) { + debug('has body', self.uri.href, bufferLength) + response.body = Buffer.concat(buffers, bufferLength) + if (self.encoding !== null) { + response.body = response.body.toString(self.encoding) + } + // `buffer` is defined in the parent scope and used in a closure it exists for the life of the Request. + // This can lead to leaky behavior if the user retains a reference to the request object. + buffers = [] + bufferLength = 0 + } else if (strings.length) { + // The UTF8 BOM [0xEF,0xBB,0xBF] is converted to [0xFE,0xFF] in the JS UTC16/UCS2 representation. + // Strip this value out when the encoding is set to 'utf8', as upstream consumers won't expect it and it breaks JSON.parse(). + if (self.encoding === 'utf8' && strings[0].length > 0 && strings[0][0] === '\uFEFF') { + strings[0] = strings[0].substring(1) + } + response.body = strings.join('') + } + + if (self._json) { + try { + response.body = JSON.parse(response.body, self._jsonReviver) + } catch (e) { + debug('invalid JSON received', self.uri.href) + } + } + debug('emitting complete', self.uri.href) + if (typeof response.body === 'undefined' && !self._json) { + response.body = self.encoding === null ? Buffer.alloc(0) : '' + } + self.emit('complete', response, response.body) + }) +} + +Request.prototype.abort = function () { + var self = this + self._aborted = true + + if (self.req) { + self.req.abort() + } else if (self.response) { + self.response.destroy() + } + + self.clearTimeout() + self.emit('abort') +} + +Request.prototype.pipeDest = function (dest) { + var self = this + var response = self.response + // Called after the response is received + if (dest.headers && !dest.headersSent) { + if (response.caseless.has('content-type')) { + var ctname = response.caseless.has('content-type') + if (dest.setHeader) { + dest.setHeader(ctname, response.headers[ctname]) + } else { + dest.headers[ctname] = response.headers[ctname] + } + } + + if (response.caseless.has('content-length')) { + var clname = response.caseless.has('content-length') + if (dest.setHeader) { + dest.setHeader(clname, response.headers[clname]) + } else { + dest.headers[clname] = response.headers[clname] + } + } + } + if (dest.setHeader && !dest.headersSent) { + for (var i in response.headers) { + // If the response content is being decoded, the Content-Encoding header + // of the response doesn't represent the piped content, so don't pass it. 
+ if (!self.gzip || i !== 'content-encoding') { + dest.setHeader(i, response.headers[i]) + } + } + dest.statusCode = response.statusCode + } + if (self.pipefilter) { + self.pipefilter(response, dest) + } +} + +Request.prototype.qs = function (q, clobber) { + var self = this + var base + if (!clobber && self.uri.query) { + base = self._qs.parse(self.uri.query) + } else { + base = {} + } + + for (var i in q) { + base[i] = q[i] + } + + var qs = self._qs.stringify(base) + + if (qs === '') { + return self + } + + self.uri = url.parse(self.uri.href.split('?')[0] + '?' + qs) + self.url = self.uri + self.path = self.uri.path + + if (self.uri.host === 'unix') { + self.enableUnixSocket() + } + + return self +} +Request.prototype.form = function (form) { + var self = this + if (form) { + if (!/^application\/x-www-form-urlencoded\b/.test(self.getHeader('content-type'))) { + self.setHeader('content-type', 'application/x-www-form-urlencoded') + } + self.body = (typeof form === 'string') + ? self._qs.rfc3986(form.toString('utf8')) + : self._qs.stringify(form).toString('utf8') + return self + } + // create form-data object + self._form = new FormData() + self._form.on('error', function (err) { + err.message = 'form-data: ' + err.message + self.emit('error', err) + self.abort() + }) + return self._form +} +Request.prototype.multipart = function (multipart) { + var self = this + + self._multipart.onRequest(multipart) + + if (!self._multipart.chunked) { + self.body = self._multipart.body + } + + return self +} +Request.prototype.json = function (val) { + var self = this + + if (!self.hasHeader('accept')) { + self.setHeader('accept', 'application/json') + } + + if (typeof self.jsonReplacer === 'function') { + self._jsonReplacer = self.jsonReplacer + } + + self._json = true + if (typeof val === 'boolean') { + if (self.body !== undefined) { + if (!/^application\/x-www-form-urlencoded\b/.test(self.getHeader('content-type'))) { + self.body = safeStringify(self.body, self._jsonReplacer) + } else { + self.body = self._qs.rfc3986(self.body) + } + if (!self.hasHeader('content-type')) { + self.setHeader('content-type', 'application/json') + } + } + } else { + self.body = safeStringify(val, self._jsonReplacer) + if (!self.hasHeader('content-type')) { + self.setHeader('content-type', 'application/json') + } + } + + if (typeof self.jsonReviver === 'function') { + self._jsonReviver = self.jsonReviver + } + + return self +} +Request.prototype.getHeader = function (name, headers) { + var self = this + var result, re, match + if (!headers) { + headers = self.headers + } + Object.keys(headers).forEach(function (key) { + if (key.length !== name.length) { + return + } + re = new RegExp(name, 'i') + match = key.match(re) + if (match) { + result = headers[key] + } + }) + return result +} +Request.prototype.enableUnixSocket = function () { + // Get the socket & request paths from the URL + var unixParts = this.uri.path.split(':') + var host = unixParts[0] + var path = unixParts[1] + // Apply unix properties to request + this.socketPath = host + this.uri.pathname = path + this.uri.path = path + this.uri.host = host + this.uri.hostname = host + this.uri.isUnix = true +} + +Request.prototype.auth = function (user, pass, sendImmediately, bearer) { + var self = this + + self._auth.onRequest(user, pass, sendImmediately, bearer) + + return self +} +Request.prototype.aws = function (opts, now) { + var self = this + + if (!now) { + self._aws = opts + return self + } + + if (opts.sign_version === 4 || opts.sign_version === '4') { + // use 
aws4 + var options = { + host: self.uri.host, + path: self.uri.path, + method: self.method, + headers: self.headers, + body: self.body + } + if (opts.service) { + options.service = opts.service + } + var signRes = aws4.sign(options, { + accessKeyId: opts.key, + secretAccessKey: opts.secret, + sessionToken: opts.session + }) + self.setHeader('authorization', signRes.headers.Authorization) + self.setHeader('x-amz-date', signRes.headers['X-Amz-Date']) + if (signRes.headers['X-Amz-Security-Token']) { + self.setHeader('x-amz-security-token', signRes.headers['X-Amz-Security-Token']) + } + } else { + // default: use aws-sign2 + var date = new Date() + self.setHeader('date', date.toUTCString()) + var auth = { + key: opts.key, + secret: opts.secret, + verb: self.method.toUpperCase(), + date: date, + contentType: self.getHeader('content-type') || '', + md5: self.getHeader('content-md5') || '', + amazonHeaders: aws2.canonicalizeHeaders(self.headers) + } + var path = self.uri.path + if (opts.bucket && path) { + auth.resource = '/' + opts.bucket + path + } else if (opts.bucket && !path) { + auth.resource = '/' + opts.bucket + } else if (!opts.bucket && path) { + auth.resource = path + } else if (!opts.bucket && !path) { + auth.resource = '/' + } + auth.resource = aws2.canonicalizeResource(auth.resource) + self.setHeader('authorization', aws2.authorization(auth)) + } + + return self +} +Request.prototype.httpSignature = function (opts) { + var self = this + httpSignature.signRequest({ + getHeader: function (header) { + return self.getHeader(header, self.headers) + }, + setHeader: function (header, value) { + self.setHeader(header, value) + }, + method: self.method, + path: self.path + }, opts) + debug('httpSignature authorization', self.getHeader('authorization')) + + return self +} +Request.prototype.hawk = function (opts) { + var self = this + self.setHeader('Authorization', hawk.header(self.uri, self.method, opts)) +} +Request.prototype.oauth = function (_oauth) { + var self = this + + self._oauth.onRequest(_oauth) + + return self +} + +Request.prototype.jar = function (jar) { + var self = this + var cookies + + if (self._redirect.redirectsFollowed === 0) { + self.originalCookieHeader = self.getHeader('cookie') + } + + if (!jar) { + // disable cookies + cookies = false + self._disableCookies = true + } else { + var targetCookieJar = jar.getCookieString ? 
jar : globalCookieJar + var urihref = self.uri.href + // fetch cookie in the Specified host + if (targetCookieJar) { + cookies = targetCookieJar.getCookieString(urihref) + } + } + + // if need cookie and cookie is not empty + if (cookies && cookies.length) { + if (self.originalCookieHeader) { + // Don't overwrite existing Cookie header + self.setHeader('cookie', self.originalCookieHeader + '; ' + cookies) + } else { + self.setHeader('cookie', cookies) + } + } + self._jar = jar + return self +} + +// Stream API +Request.prototype.pipe = function (dest, opts) { + var self = this + + if (self.response) { + if (self._destdata) { + self.emit('error', new Error('You cannot pipe after data has been emitted from the response.')) + } else if (self._ended) { + self.emit('error', new Error('You cannot pipe after the response has been ended.')) + } else { + stream.Stream.prototype.pipe.call(self, dest, opts) + self.pipeDest(dest) + return dest + } + } else { + self.dests.push(dest) + stream.Stream.prototype.pipe.call(self, dest, opts) + return dest + } +} +Request.prototype.write = function () { + var self = this + if (self._aborted) { return } + + if (!self._started) { + self.start() + } + if (self.req) { + return self.req.write.apply(self.req, arguments) + } +} +Request.prototype.end = function (chunk) { + var self = this + if (self._aborted) { return } + + if (chunk) { + self.write(chunk) + } + if (!self._started) { + self.start() + } + if (self.req) { + self.req.end() + } +} +Request.prototype.pause = function () { + var self = this + if (!self.responseContent) { + self._paused = true + } else { + self.responseContent.pause.apply(self.responseContent, arguments) + } +} +Request.prototype.resume = function () { + var self = this + if (!self.responseContent) { + self._paused = false + } else { + self.responseContent.resume.apply(self.responseContent, arguments) + } +} +Request.prototype.destroy = function () { + var self = this + this.clearTimeout() + if (!self._ended) { + self.end() + } else if (self.response) { + self.response.destroy() + } +} + +Request.prototype.clearTimeout = function () { + if (this.timeoutTimer) { + clearTimeout(this.timeoutTimer) + this.timeoutTimer = null + } +} + +Request.defaultProxyHeaderWhiteList = + Tunnel.defaultProxyHeaderWhiteList.slice() + +Request.defaultProxyHeaderExclusiveList = + Tunnel.defaultProxyHeaderExclusiveList.slice() + +// Exports + +Request.prototype.toJSON = requestToJSON +module.exports = Request + + +/***/ }), + +/***/ 21867: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(64293) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument 
must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + +/***/ }), + +/***/ 15118: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/* eslint-disable node/no-deprecated-api */ + + + +var buffer = __nccwpck_require__(64293) +var Buffer = buffer.Buffer + +var safer = {} + +var key + +for (key in buffer) { + if (!buffer.hasOwnProperty(key)) continue + if (key === 'SlowBuffer' || key === 'Buffer') continue + safer[key] = buffer[key] +} + +var Safer = safer.Buffer = {} +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue + Safer[key] = Buffer[key] +} + +safer.Buffer.prototype = Buffer.prototype + +if (!Safer.from || Safer.from === Uint8Array.from) { + Safer.from = function (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('The "value" argument must not be of type number. Received type ' + typeof value) + } + if (value && typeof value.length === 'undefined') { + throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value) + } + return Buffer(value, encodingOrOffset, length) + } +} + +if (!Safer.alloc) { + Safer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + var buf = Buffer(size) + if (!fill || fill.length === 0) { + buf.fill(0) + } else if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + return buf + } +} + +if (!safer.kStringMaxLength) { + try { + safer.kStringMaxLength = process.binding('buffer').kStringMaxLength + } catch (e) { + // we can't determine kStringMaxLength in environments where process.binding + // is unsupported, so let's not set it + } +} + +if (!safer.constants) { + safer.constants = { + MAX_LENGTH: safer.kMaxLength + } + if (safer.kStringMaxLength) { + safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength + } +} + +module.exports = safer + + +/***/ }), + +/***/ 72043: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. 
+ // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. + if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) } - }); -})(Reflect || (Reflect = {})); + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. 
+ var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = __nccwpck_require__(92413).Stream + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. + me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) -/***/ }), + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = __nccwpck_require__(24304).StringDecoder + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } -/***/ 48699: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this._parser.write(data.toString()) + this.emit('data', data) + return true + } -"use strict"; -// Copyright 2010-2012 Mikeal Rogers -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? 
[arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + return Stream.prototype.on.call(me, ev, handler) + } -var extend = __nccwpck_require__(38171) -var cookies = __nccwpck_require__(50976) -var helpers = __nccwpck_require__(74845) + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } -var paramsHaveRequestBody = helpers.paramsHaveRequestBody + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ -// organize params for patch, post, put, head, del -function initParams (uri, options, callback) { - if (typeof options === 'function') { - callback = options + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' } - var params = {} - if (options !== null && typeof options === 'object') { - extend(params, options, {uri: uri}) - } else if (typeof uri === 'string') { - extend(params, {uri: uri}) - } else { - extend(params, uri) + function isQuote (c) { + return c === '"' || c === '\'' } - params.callback = callback || params.callback - return params -} + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } -function request (uri, options, callback) { - if (typeof uri === 'undefined') { - throw new Error('undefined is not a valid uri or options object.') + function isMatch (regex, c) { + return regex.test(c) } - var params = initParams(uri, options, callback) + function notMatch (regex, c) { + return !isMatch(regex, c) + } - if (params.method === 'HEAD' && paramsHaveRequestBody(params)) { - throw new Error('HTTP HEAD requests MUST NOT include a request body.') + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. + OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, //