From 40dfcf47e78c0f90defc4c242c07612c91d87849 Mon Sep 17 00:00:00 2001
From: Irene Alvarado
Date: Wed, 21 Jul 2021 15:34:11 -0400
Subject: [PATCH] fix: update readme

---
 README.md          |     25 +-
 dist/LICENSE       |     16 +-
 dist/index.js      | 485169 +++++++++++++++++++++---------------------
 dist/index.js.map  |      2 +-
 dist/post/index.js |    215 +-
 5 files changed, 241128 insertions(+), 244299 deletions(-)

diff --git a/README.md b/README.md
index a017baa..dd3d9a8 100644
--- a/README.md
+++ b/README.md
@@ -101,8 +101,29 @@ For example, if this field is set to `Bearer abc123` then the following header i
 }
 ```
 
-And in the action yaml:
-`authorization: 'Bearer abc123'`
+#### `axios_config` (optional)
+
+Under the hood, the `http` backend uses [Axios](https://github.com/axios/axios) for data fetching. By default, Flat assumes you want to fetch data with the `GET` method, but if you'd like to `POST` instead (e.g., to send a GraphQL query), the `axios_config` option allows you to override this behavior.
+
+Specifically, the `axios_config` parameter should be a relative path to a `.json` file in your repository. This JSON file should mirror the shape of [Axios' request config parameters](https://github.com/axios/axios#request-config), with a few notable exceptions.
+
+- `url` and `baseURL` will both be ignored, as the `http_url` specified above will take precedence.
+- `headers` will be merged with the authorization header described by the `authorization` parameter above. Please do not put secret keys here, as they will be stored in plain text!
+- All `function` parameters will be ignored (e.g., `transformRequest`).
+- The response type is always set to `responseType: 'stream'` in the background.
+
+An example `axios_config` might look like this if you wanted to hit GitHub's GraphQL API ([here is a demo](https://github.com/githubocto/flat-demo-graphql)) 👇
+
+```json
+{
+  "method": "post",
+  "data": {
+    "query": "query { repository(owner:\"octocat\", name:\"Hello-World\") { issues(last:20, states:CLOSED) { edges { node { title url labels(first:5) { edges { node { name } } } } } } } }"
+  }
+}
+```
+
+We advise escaping double quotes as `\"` in your JSON file.
 
 #### `downloaded_filename`
 
diff --git a/dist/LICENSE b/dist/LICENSE
index f8e0839..9915c7f 100644
--- a/dist/LICENSE
+++ b/dist/LICENSE
@@ -15,6 +15,15 @@ MIT
 @actions/io
 MIT
 
+The MIT License (MIT)
+
+Copyright 2019 GitHub
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@azure/ms-rest-azure-env MIT @@ -3091,7 +3100,7 @@ pg MIT MIT License -Copyright (c) 2010 - 2020 Brian Carlson +Copyright (c) 2010 - 2021 Brian Carlson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -3182,7 +3191,7 @@ pg-protocol MIT MIT License -Copyright (c) 2010 - 2020 Brian Carlson +Copyright (c) 2010 - 2021 Brian Carlson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -4199,8 +4208,7 @@ MIT underscore MIT -Copyright (c) 2009-2020 Jeremy Ashkenas, DocumentCloud and Investigative -Reporters & Editors +Copyright (c) 2009-2021 Jeremy Ashkenas, Julian Gonggrijp, and DocumentCloud and Investigative Reporters & Editors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation diff --git a/dist/index.js b/dist/index.js index abd800d..744a241 100644 --- a/dist/index.js +++ b/dist/index.js @@ -260,7 +260,7 @@ var __importStar = (this && this.__importStar) || function (mod) { Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isSQLConfig = exports.isHTTPConfig = exports.getConfig = void 0; const core = __importStar(__nccwpck_require__(42186)); -const z = __importStar(__nccwpck_require__(30489)); +const z = __importStar(__nccwpck_require__(63301)); const FormatEnum = z.enum(['csv', 'json']); const CommonConfigSchema = z.object({ downloaded_filename: z.string(), @@ -589,14 +589,27 @@ run().catch(error => { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; const os = __importStar(__nccwpck_require__(12087)); const utils_1 = __nccwpck_require__(5278); /** @@ -675,6 +688,25 @@ function escapeProperty(s) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -684,14 +716,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = __nccwpck_require__(87351); const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); @@ -758,7 +784,9 @@ function addPath(inputPath) { } exports.addPath = addPath; /** - * Gets the value of an input. The value is also trimmed. + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. * * @param name name of the input to get * @param options optional. See InputOptions. @@ -769,9 +797,34 @@ function getInput(name, options) { if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } + if (options && options.trimWhitespace === false) { + return val; + } return val.trim(); } exports.getInput = getInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; /** * Sets the value of an output. 
* @@ -780,6 +833,7 @@ exports.getInput = getInput; */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { + process.stdout.write(os.EOL); command_1.issueCommand('set-output', { name }, value); } exports.setOutput = setOutput; @@ -921,14 +975,27 @@ exports.getState = getState; "use strict"; // For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issueCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const fs = __importStar(__nccwpck_require__(35747)); @@ -959,6 +1026,7 @@ exports.issueCommand = issueCommand; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandValue = void 0; /** * Sanitizes an input into a string so it can be passed into issueCommand safely * @param input input to sanitize into a string @@ -1649,11 +1717,18 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); const assert_1 = __nccwpck_require__(42357); -const fs = __nccwpck_require__(35747); -const path = __nccwpck_require__(85622); +const fs = __importStar(__nccwpck_require__(35747)); +const path = __importStar(__nccwpck_require__(85622)); _a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; exports.IS_WINDOWS = process.platform === 'win32'; function exists(fsPath) { @@ -1851,11 +1926,18 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = 
mod; + return result; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -const childProcess = __nccwpck_require__(63129); -const path = __nccwpck_require__(85622); +const childProcess = __importStar(__nccwpck_require__(63129)); +const path = __importStar(__nccwpck_require__(85622)); const util_1 = __nccwpck_require__(31669); -const ioUtil = __nccwpck_require__(81962); +const ioUtil = __importStar(__nccwpck_require__(81962)); const exec = util_1.promisify(childProcess.exec); /** * Copies a file or folder. @@ -2023,58 +2105,73 @@ function which(tool, check) { throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); } } + return result; } - try { - // build the list of extensions to try - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { - for (const extension of process.env.PATHEXT.split(path.delimiter)) { - if (extension) { - extensions.push(extension); - } - } - } - // if it's rooted, return it if exists. otherwise return empty. - if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return filePath; + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ''; + }); +} +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. + * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); } - return ''; - } - // if any path separators, return empty - if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { - return ''; } - // build the list of directories - // - // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, - // it feels like we should not do this. Checking the current directory seems like more of a use - // case of a shell, and the which() function exposed by the toolkit should strive for consistency - // across platforms. - const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path.delimiter)) { - if (p) { - directories.push(p); - } - } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; } - // return the first match - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); - if (filePath) { - return filePath; + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. 
+ const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); } } - return ''; } - catch (err) { - throw new Error(`which failed with message ${err.message}`); + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } } + return matches; }); } -exports.which = which; +exports.findInPath = findInPath; function readCopyOptions(options) { const force = options.force == null ? true : options.force; const recursive = Boolean(options.recursive); @@ -2326,17 +2423,17 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } -var uuidv4 = _interopDefault(__nccwpck_require__(20621)); +var uuidv4 = _interopDefault(__nccwpck_require__(80824)); var axios = _interopDefault(__nccwpck_require__(96545)); var stream = __nccwpck_require__(92413); -var FormData = _interopDefault(__nccwpck_require__(64334)); -var tough = __nccwpck_require__(68930); +var FormData = _interopDefault(__nccwpck_require__(69970)); +var tough = __nccwpck_require__(47372); var tunnel = __nccwpck_require__(74294); var http = __nccwpck_require__(98605); var https = __nccwpck_require__(57211); var xml2js = __nccwpck_require__(66189); var os = __nccwpck_require__(12087); -var tslib = __nccwpck_require__(75636); +var tslib = __nccwpck_require__(35390); // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. @@ -6451,2042 +6548,1861 @@ exports.DomainCredentials = DomainCredentials; /***/ }), -/***/ 68930: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; -/*! - * Copyright (c) 2015, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- */ +/***/ 69970: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var net = __nccwpck_require__(11631); -var urlParse = __nccwpck_require__(78835).parse; +var CombinedStream = __nccwpck_require__(85443); var util = __nccwpck_require__(31669); -var pubsuffix = __nccwpck_require__(23005); -var Store = __nccwpck_require__(5340)/* .Store */ .y; -var MemoryCookieStore = __nccwpck_require__(96868)/* .MemoryCookieStore */ .m; -var pathMatch = __nccwpck_require__(55319)/* .pathMatch */ .U; -var VERSION = __nccwpck_require__(88263); +var path = __nccwpck_require__(85622); +var http = __nccwpck_require__(98605); +var https = __nccwpck_require__(57211); +var parseUrl = __nccwpck_require__(78835).parse; +var fs = __nccwpck_require__(35747); +var mime = __nccwpck_require__(43583); +var asynckit = __nccwpck_require__(14812); +var populate = __nccwpck_require__(53647); -var punycode; -try { - punycode = __nccwpck_require__(94213); -} catch(e) { - console.warn("tough-cookie: can't load punycode; won't use punycode for domain normalization"); -} +// Public API +module.exports = FormData; -// From RFC6265 S4.1.1 -// note that it excludes \x3B ";" -var COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/; +// make it a Stream +util.inherits(FormData, CombinedStream); -var CONTROL_CHARS = /[\x00-\x1F]/; +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. + * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(); + } -// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in -// the "relaxed" mode, see: -// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60 -var TERMINATORS = ['\n', '\r', '\0']; + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; -// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' -// Note ';' is \x3B -var PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; + CombinedStream.call(this); -// date-time parsing constants (RFC6265 S5.1.1) + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} -var DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; -var MONTH_TO_NUM = { - jan:0, feb:1, mar:2, apr:3, may:4, jun:5, - jul:6, aug:7, sep:8, oct:9, nov:10, dec:11 -}; -var NUM_TO_MONTH = [ - 'Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec' -]; -var NUM_TO_DAY = [ - 'Sun','Mon','Tue','Wed','Thu','Fri','Sat' -]; +FormData.prototype.append = function(field, value, options) { -var MAX_TIME = 2147483647000; // 31-bit max -var MIN_TIME = 0; // 31-bit min + options = options || {}; -/* - * Parses a Natural number (i.e., non-negative integer) with either the - * *DIGIT ( non-digit *OCTET ) - * or - * *DIGIT - * grammar (RFC6265 S5.1.1). - * - * The "trailingOK" boolean controls if the grammar accepts a - * "( non-digit *OCTET )" trailer. 
- */ -function parseDigits(token, minDigits, maxDigits, trailingOK) { - var count = 0; - while (count < token.length) { - var c = token.charCodeAt(count); - // "non-digit = %x00-2F / %x3A-FF" - if (c <= 0x2F || c >= 0x3A) { - break; - } - count++; + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; } - // constrain to a minimum and maximum number of digits. - if (count < minDigits || count > maxDigits) { - return null; + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; } - if (!trailingOK && count != token.length) { - return null; + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; } - return parseInt(token.substr(0,count), 10); -} + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); -function parseTime(token) { - var parts = token.split(':'); - var result = [0,0,0]; + append(header); + append(value); + append(footer); - /* RF6256 S5.1.1: - * time = hms-time ( non-digit *OCTET ) - * hms-time = time-field ":" time-field ":" time-field - * time-field = 1*2DIGIT - */ + // pass along options.knownLength + this._trackLength(header, value, options); +}; - if (parts.length !== 3) { - return null; - } +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; - for (var i = 0; i < 3; i++) { - // "time-field" must be strictly "1*2DIGIT", HOWEVER, "hms-time" can be - // followed by "( non-digit *OCTET )" so therefore the last time-field can - // have a trailer - var trailingOK = (i == 2); - var num = parseDigits(parts[i], 1, 2, trailingOK); - if (num === null) { - return null; - } - result[i] = num; + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); } - return result; -} + this._valueLength += valueLength; -function parseMonth(token) { - token = String(token).substr(0,3).toLowerCase(); - var num = MONTH_TO_NUM[token]; - return num >= 0 ? num : null; -} + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; -/* - * RFC6265 S5.1.1 date parser (see RFC for full grammar) - */ -function parseDate(str) { - if (!str) { + // empty or either doesn't have path or not an http response + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) { return; } - /* RFC6265 S5.1.1: - * 2. 
Process each date-token sequentially in the order the date-tokens - * appear in the cookie-date - */ - var tokens = str.split(DATE_DELIM); - if (!tokens) { - return; + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); } +}; - var hour = null; - var minute = null; - var second = null; - var dayOfMonth = null; - var month = null; - var year = null; +FormData.prototype._lengthRetriever = function(value, callback) { - for (var i=0; i read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { - /* 2.1. If the found-time flag is not set and the token matches the time - * production, set the found-time flag and set the hour- value, - * minute-value, and second-value to the numbers denoted by the digits in - * the date-token, respectively. Skip the remaining sub-steps and continue - * to the next date-token. - */ - if (second === null) { - result = parseTime(token); - if (result) { - hour = result[0]; - minute = result[1]; - second = result[2]; - continue; - } - } + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); - /* 2.2. If the found-day-of-month flag is not set and the date-token matches - * the day-of-month production, set the found-day-of- month flag and set - * the day-of-month-value to the number denoted by the date-token. Skip - * the remaining sub-steps and continue to the next date-token. - */ - if (dayOfMonth === null) { - // "day-of-month = 1*2DIGIT ( non-digit *OCTET )" - result = parseDigits(token, 1, 2, true); - if (result !== null) { - dayOfMonth = result; - continue; - } - } + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { - /* 2.3. If the found-month flag is not set and the date-token matches the - * month production, set the found-month flag and set the month-value to - * the month denoted by the date-token. Skip the remaining sub-steps and - * continue to the next date-token. - */ - if (month === null) { - result = parseMonth(token); - if (result !== null) { - month = result; - continue; - } - } + var fileSize; - /* 2.4. If the found-year flag is not set and the date-token matches the - * year production, set the found-year flag and set the year-value to the - * number denoted by the date-token. Skip the remaining sub-steps and - * continue to the next date-token. - */ - if (year === null) { - // "year = 2*4DIGIT ( non-digit *OCTET )" - result = parseDigits(token, 2, 4, true); - if (result !== null) { - year = result; - /* From S5.1.1: - * 3. If the year-value is greater than or equal to 70 and less - * than or equal to 99, increment the year-value by 1900. - * 4. If the year-value is greater than or equal to 0 and less - * than or equal to 69, increment the year-value by 2000. - */ - if (year >= 70 && year <= 99) { - year += 1900; - } else if (year >= 0 && year <= 69) { - year += 2000; + if (err) { + callback(err); + return; } - } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); } +}; - /* RFC 6265 S5.1.1 - * "5. Abort these steps and fail to parse the cookie-date if: - * * at least one of the found-day-of-month, found-month, found- - * year, or found-time flags is not set, - * * the day-of-month-value is less than 1 or greater than 31, - * * the year-value is less than 1601, - * * the hour-value is greater than 23, - * * the minute-value is greater than 59, or - * * the second-value is greater than 59. - * (Note that leap seconds cannot be represented in this syntax.)" - * - * So, in order as above: - */ - if ( - dayOfMonth === null || month === null || year === null || second === null || - dayOfMonth < 1 || dayOfMonth > 31 || - year < 1601 || - hour > 23 || - minute > 59 || - second > 59 - ) { - return; +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; } - return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second)); -} + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); -function formatDate(date) { - var d = date.getUTCDate(); d = d >= 10 ? d : '0'+d; - var h = date.getUTCHours(); h = h >= 10 ? h : '0'+h; - var m = date.getUTCMinutes(); m = m >= 10 ? m : '0'+m; - var s = date.getUTCSeconds(); s = s >= 10 ? s : '0'+s; - return NUM_TO_DAY[date.getUTCDay()] + ', ' + - d+' '+ NUM_TO_MONTH[date.getUTCMonth()] +' '+ date.getUTCFullYear() +' '+ - h+':'+m+':'+s+' GMT'; -} + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; -// S5.1.2 Canonicalized Host Names -function canonicalDomain(str) { - if (str == null) { - return null; + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); } - str = str.trim().replace(/^\./,''); // S4.1.2.3 & S5.2.3: ignore leading . - // convert to IDN if any non-ASCII characters - if (punycode && /[^\u0001-\u007f]/.test(str)) { - str = punycode.toASCII(str); - } + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; - return str.toLowerCase(); -} + // skip nullish headers. + if (header == null) { + continue; + } -// S5.1.3 Domain Matching -function domainMatch(str, domStr, canonicalize) { - if (str == null || domStr == null) { - return null; - } - if (canonicalize !== false) { - str = canonicalDomain(str); - domStr = canonicalDomain(domStr); - } + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } - /* - * "The domain string and the string are identical. 
(Note that both the - * domain string and the string will have been canonicalized to lower case at - * this point)" - */ - if (str == domStr) { - return true; + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } } - /* "All of the following [three] conditions hold:" (order adjusted from the RFC) */ + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; - /* "* The string is a host name (i.e., not an IP address)." */ - if (net.isIP(str)) { - return false; - } +FormData.prototype._getContentDisposition = function(value, options) { - /* "* The domain string is a suffix of the string" */ - var idx = str.indexOf(domStr); - if (idx <= 0) { - return false; // it's a non-match (-1) or prefix (0) - } + var filename + , contentDisposition + ; - // e.g "a.b.c".indexOf("b.c") === 2 - // 5 === 3+2 - if (str.length !== domStr.length + idx) { // it's not a suffix - return false; + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); } - /* "* The last character of the string that is not included in the domain - * string is a %x2E (".") character." */ - if (str.substr(idx-1,1) !== '.') { - return false; + if (filename) { + contentDisposition = 'filename="' + filename + '"'; } - return true; -} + return contentDisposition; +}; +FormData.prototype._getContentType = function(value, options) { -// RFC6265 S5.1.4 Paths and Path-Match + // use custom content-type above all + var contentType = options.contentType; -/* - * "The user agent MUST use an algorithm equivalent to the following algorithm - * to compute the default-path of a cookie:" - * - * Assumption: the path (and not query part or absolute uri) is passed in. - */ -function defaultPath(path) { - // "2. If the uri-path is empty or if the first character of the uri-path is not - // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. - if (!path || path.substr(0,1) !== "/") { - return "/"; + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); } - // "3. If the uri-path contains no more than one %x2F ("/") character, output - // %x2F ("/") and skip the remaining step." - if (path === "/") { - return path; + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); } - var rightSlash = path.lastIndexOf("/"); - if (rightSlash === 0) { - return "/"; + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; } - // "4. Output the characters of the uri-path from the first character up to, - // but not including, the right-most %x2F ("/")." 
- return path.slice(0, rightSlash); -} + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } -function trimTerminator(str) { - for (var t = 0; t < TERMINATORS.length; t++) { - var terminatorIdx = str.indexOf(TERMINATORS[t]); - if (terminatorIdx !== -1) { - str = str.substr(0,terminatorIdx); - } + // fallback to the default content type if `value` is not simple value + if (!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; } - return str; -} + return contentType; +}; -function parseCookiePair(cookiePair, looseMode) { - cookiePair = trimTerminator(cookiePair); +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; - var firstEq = cookiePair.indexOf('='); - if (looseMode) { - if (firstEq === 0) { // '=' is immediately at start - cookiePair = cookiePair.substr(1); - firstEq = cookiePair.indexOf('='); // might still need to split on '=' + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); } - } else { // non-loose mode - if (firstEq <= 0) { // no '=' or is at start - return; // needs to have non-empty "cookie-name" + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; } } - var cookieName, cookieValue; - if (firstEq <= 0) { - cookieName = ""; - cookieValue = cookiePair.trim(); - } else { - cookieName = cookiePair.substr(0, firstEq).trim(); - cookieValue = cookiePair.substr(firstEq+1).trim(); + return formHeaders; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); } - if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) { - return; + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } } - var c = new Cookie(); - c.key = cookieName; - c.value = cookieValue; - return c; -} + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; -function parse(str, options) { - if (!options || typeof options !== 'object') { - options = {}; +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. 
+ var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); } - str = str.trim(); - // We use a regex to parse the "name-value-pair" part of S5.2 - var firstSemi = str.indexOf(';'); // S5.2 step 1 - var cookiePair = (firstSemi === -1) ? str : str.substr(0, firstSemi); - var c = parseCookiePair(cookiePair, !!options.loose); - if (!c) { - return; + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; } - if (firstSemi === -1) { - return c; + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. + // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); } - // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string - // (including the %x3B (";") in question)." plus later on in the same section - // "discard the first ";" and trim". - var unparsed = str.slice(firstSemi + 1).trim(); + return knownLength; +}; - // "If the unparsed-attributes string is empty, skip the rest of these - // steps." - if (unparsed.length === 0) { - return c; +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; } - /* - * S5.2 says that when looping over the items "[p]rocess the attribute-name - * and attribute-value according to the requirements in the following - * subsections" for every item. Plus, for many of the individual attributes - * in S5.3 it says to use the "attribute-value of the last attribute in the - * cookie-attribute-list". Therefore, in this implementation, we overwrite - * the previous value. - */ - var cookie_avs = unparsed.split(';'); - while (cookie_avs.length) { - var av = cookie_avs.shift().trim(); - if (av.length === 0) { // happens if ";;" appears - continue; - } - var av_sep = av.indexOf('='); - var av_key, av_value; + return hasKnownLength; +}; - if (av_sep === -1) { - av_key = av; - av_value = null; - } else { - av_key = av.substr(0,av_sep); - av_value = av.substr(av_sep+1); - } +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; - av_key = av_key.trim().toLowerCase(); + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } - if (av_value) { - av_value = av_value.trim(); + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; } - switch(av_key) { - case 'expires': // S5.2.1 - if (av_value) { - var exp = parseDate(av_value); - // "If the attribute-value failed to parse as a cookie date, ignore the - // cookie-av." 
- if (exp) { - // over and underflow not realistically a concern: V8's getTime() seems to - // store something larger than a 32-bit time_t (even with 32-bit node) - c.expires = exp; - } - } - break; + values.forEach(function(length) { + knownLength += length; + }); - case 'max-age': // S5.2.2 - if (av_value) { - // "If the first character of the attribute-value is not a DIGIT or a "-" - // character ...[or]... If the remainder of attribute-value contains a - // non-DIGIT character, ignore the cookie-av." - if (/^-?[0-9]+$/.test(av_value)) { - var delta = parseInt(av_value, 10); - // "If delta-seconds is less than or equal to zero (0), let expiry-time - // be the earliest representable date and time." - c.setMaxAge(delta); - } - } - break; + cb(null, knownLength); + }); +}; - case 'domain': // S5.2.3 - // "If the attribute-value is empty, the behavior is undefined. However, - // the user agent SHOULD ignore the cookie-av entirely." - if (av_value) { - // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E - // (".") character." - var domain = av_value.trim().replace(/^\./, ''); - if (domain) { - // "Convert the cookie-domain to lower case." - c.domain = domain.toLowerCase(); - } - } - break; +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; - case 'path': // S5.2.4 - /* - * "If the attribute-value is empty or if the first character of the - * attribute-value is not %x2F ("/"): - * Let cookie-path be the default-path. - * Otherwise: - * Let cookie-path be the attribute-value." - * - * We'll represent the default-path as null since it depends on the - * context of the parsing. - */ - c.path = av_value && av_value[0] === "/" ? av_value : null; - break; + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { - case 'secure': // S5.2.5 - /* - * "If the attribute-name case-insensitively matches the string "Secure", - * the user agent MUST append an attribute to the cookie-attribute-list - * with an attribute-name of Secure and an empty attribute-value." - */ - c.secure = true; - break; + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); - case 'httponly': // S5.2.6 -- effectively the same as 'secure' - c.httpOnly = true; - break; + // use custom params + } else { - default: - c.extensions = c.extensions || []; - c.extensions.push(av); - break; + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; } } - return c; -} - -// avoid the V8 deoptimization monster! -function jsonParse(str) { - var obj; - try { - obj = JSON.parse(str); - } catch (e) { - return e; - } - return obj; -} - -function fromJSON(str) { - if (!str) { - return null; - } + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); - var obj; - if (typeof str === 'string') { - obj = jsonParse(str); - if (obj instanceof Error) { - return null; - } + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); } else { - // assume it's an Object - obj = str; + request = http.request(options); } - var c = new Cookie(); - for (var i=0; i { - // ascending for time: a CMP b - var aTime = a.creation ? a.creation.getTime() : MAX_TIME; - var bTime = b.creation ? 
b.creation.getTime() : MAX_TIME; - cmp = aTime - bTime; - if (cmp !== 0) { - return cmp; - } +// populates missing values +module.exports = function(dst, src) { - // break ties for the same millisecond (precision of JavaScript's clock) - cmp = a.creationIndex - b.creationIndex; + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); - return cmp; -} + return dst; +}; -// Gives the permutation of all possible pathMatch()es of a given path. The -// array is in longest-to-shortest order. Handy for indexing. -function permutePath(path) { - if (path === '/') { - return ['/']; - } - if (path.lastIndexOf('/') === path.length-1) { - path = path.substr(0,path.length-1); - } - var permutations = [path]; - while (path.length > 1) { - var lindex = path.lastIndexOf('/'); - if (lindex === 0) { - break; - } - path = path.substr(0,lindex); - permutations.push(path); - } - permutations.push('/'); - return permutations; -} - -function getCookieContext(url) { - if (url instanceof Object) { - return url; - } - // NOTE: decodeURI will throw on malformed URIs (see GH-32). - // Therefore, we will just skip decoding for such URIs. - try { - url = decodeURI(url); - } - catch(err) { - // Silently swallow error - } - - return urlParse(url); -} - -function Cookie(options) { - options = options || {}; - - Object.keys(options).forEach(function(prop) { - if (Cookie.prototype.hasOwnProperty(prop) && - Cookie.prototype[prop] !== options[prop] && - prop.substr(0,1) !== '_') - { - this[prop] = options[prop]; - } - }, this); - this.creation = this.creation || new Date(); - - // used to break creation ties in cookieCompare(): - Object.defineProperty(this, 'creationIndex', { - configurable: false, - enumerable: false, // important for assert.deepEqual checks - writable: true, - value: ++Cookie.cookiesCreated - }); -} +/***/ }), -Cookie.cookiesCreated = 0; // incremented each time a cookie is created +/***/ 35390: +/***/ ((__unused_webpack_module, __webpack_exports__, __nccwpck_require__) => { -Cookie.parse = parse; -Cookie.fromJSON = fromJSON; +"use strict"; +__nccwpck_require__.r(__webpack_exports__); +/* harmony export */ __nccwpck_require__.d(__webpack_exports__, { +/* harmony export */ "__extends": () => /* binding */ __extends, +/* harmony export */ "__assign": () => /* binding */ __assign, +/* harmony export */ "__rest": () => /* binding */ __rest, +/* harmony export */ "__decorate": () => /* binding */ __decorate, +/* harmony export */ "__param": () => /* binding */ __param, +/* harmony export */ "__metadata": () => /* binding */ __metadata, +/* harmony export */ "__awaiter": () => /* binding */ __awaiter, +/* harmony export */ "__generator": () => /* binding */ __generator, +/* harmony export */ "__createBinding": () => /* binding */ __createBinding, +/* harmony export */ "__exportStar": () => /* binding */ __exportStar, +/* harmony export */ "__values": () => /* binding */ __values, +/* harmony export */ "__read": () => /* binding */ __read, +/* harmony export */ "__spread": () => /* binding */ __spread, +/* harmony export */ "__spreadArrays": () => /* binding */ __spreadArrays, +/* harmony export */ "__await": () => /* binding */ __await, +/* harmony export */ "__asyncGenerator": () => /* binding */ __asyncGenerator, +/* harmony export */ "__asyncDelegator": () => /* binding */ __asyncDelegator, +/* harmony export */ "__asyncValues": () => /* binding */ __asyncValues, +/* harmony export */ "__makeTemplateObject": () => /* binding */ __makeTemplateObject, +/* harmony export */ 
"__importStar": () => /* binding */ __importStar, +/* harmony export */ "__importDefault": () => /* binding */ __importDefault, +/* harmony export */ "__classPrivateFieldGet": () => /* binding */ __classPrivateFieldGet, +/* harmony export */ "__classPrivateFieldSet": () => /* binding */ __classPrivateFieldSet +/* harmony export */ }); +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +function __extends(d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +function __createBinding(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +} + +function __exportStar(m, exports) { + for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) exports[p] = m[p]; +} + +function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +}; + +function __await(v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); +} + +function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result.default = mod; + return result; +} + +function __importDefault(mod) { + return (mod && mod.__esModule) ? 
mod : { default: mod }; +} + +function __classPrivateFieldGet(receiver, privateMap) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to get private field on non-instance"); + } + return privateMap.get(receiver); +} + +function __classPrivateFieldSet(receiver, privateMap, value) { + if (!privateMap.has(receiver)) { + throw new TypeError("attempted to set private field on non-instance"); + } + privateMap.set(receiver, value); + return value; +} -Cookie.prototype.key = ""; -Cookie.prototype.value = ""; -// the order in which the RFC has them: -Cookie.prototype.expires = "Infinity"; // coerces to literal Infinity -Cookie.prototype.maxAge = null; // takes precedence over expires for TTL -Cookie.prototype.domain = null; -Cookie.prototype.path = null; -Cookie.prototype.secure = false; -Cookie.prototype.httpOnly = false; -Cookie.prototype.extensions = null; +/***/ }), -// set by the CookieJar: -Cookie.prototype.hostOnly = null; // boolean when set -Cookie.prototype.pathIsDefault = null; // boolean when set -Cookie.prototype.creation = null; // Date when set; defaulted by Cookie.parse -Cookie.prototype.lastAccessed = null; // Date when set -Object.defineProperty(Cookie.prototype, 'creationIndex', { - configurable: true, - enumerable: false, - writable: true, - value: 0 -}); +/***/ 2265: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -Cookie.serializableProperties = Object.keys(Cookie.prototype) - .filter(function(prop) { - return !( - Cookie.prototype[prop] instanceof Function || - prop === 'creationIndex' || - prop.substr(0,1) === '_' - ); - }); +"use strict"; -Cookie.prototype.inspect = function inspect() { - var now = Date.now(); - return 'Cookie="'+this.toString() + - '; hostOnly='+(this.hostOnly != null ? this.hostOnly : '?') + - '; aAge='+(this.lastAccessed ? (now-this.lastAccessed.getTime())+'ms' : '?') + - '; cAge='+(this.creation ? (now-this.creation.getTime())+'ms' : '?') + - '"'; +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - -// Use the new custom inspection symbol to add the custom inspect function if -// available. -if (util.inspect.custom) { - Cookie.prototype[util.inspect.custom] = Cookie.prototype.inspect; -} - -Cookie.prototype.toJSON = function() { - var obj = {}; - - var props = Cookie.serializableProperties; - for (var i=0; i\n-----END PRIVATE KEY-----\n-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----\n" + * - CertificateFilePath: **Absolute** file path of the .pem file. + * @param domain The domain or tenant id containing this application. + * @param options AzureTokenCredentialsOptions - Object representing optional parameters. 
+ * + * @returns ApplicationTokenCertificateCredentials + */ + static create(clientId, certificateStringOrFilePath, domain, options) { + if (!certificateStringOrFilePath || + typeof certificateStringOrFilePath.valueOf() !== "string") { + throw new Error("'certificateStringOrFilePath' must be a non empty string."); + } + if (!certificateStringOrFilePath.startsWith("-----BEGIN")) { + certificateStringOrFilePath = fs_1.readFileSync(certificateStringOrFilePath, "utf8"); + } + const certificatePattern = /(-+BEGIN CERTIFICATE-+)(\n\r?|\r\n?)([A-Za-z0-9\+\/\n\r]+\=*)(\n\r?|\r\n?)(-+END CERTIFICATE-+)/; + const matchCert = certificateStringOrFilePath.match(certificatePattern); + const rawCertificate = matchCert ? matchCert[3] : ""; + if (!rawCertificate) { + throw new Error("Unable to correctly parse the certificate from the value provided in 'certificateStringOrFilePath' "); + } + const thumbprint = crypto_1.createHash("sha1") + .update(Buffer.from(rawCertificate, "base64")) + .digest("hex"); + return new ApplicationTokenCertificateCredentials(clientId, domain, certificateStringOrFilePath, thumbprint, options.tokenAudience, options.environment, options.tokenCache); } - } - return true; -}; - -Cookie.prototype.setExpires = function setExpires(exp) { - if (exp instanceof Date) { - this.expires = exp; - } else { - this.expires = parseDate(exp) || "Infinity"; - } -}; - -Cookie.prototype.setMaxAge = function setMaxAge(age) { - if (age === Infinity || age === -Infinity) { - this.maxAge = age.toString(); // so JSON.stringify() works - } else { - this.maxAge = age; - } -}; - -// gives Cookie header format -Cookie.prototype.cookieString = function cookieString() { - var val = this.value; - if (val == null) { - val = ''; - } - if (this.key === '') { - return val; - } - return this.key+'='+val; -}; - -// gives Set-Cookie header format -Cookie.prototype.toString = function toString() { - var str = this.cookieString(); +} +exports.ApplicationTokenCertificateCredentials = ApplicationTokenCertificateCredentials; +//# sourceMappingURL=applicationTokenCertificateCredentials.js.map - if (this.expires != Infinity) { - if (this.expires instanceof Date) { - str += '; Expires='+formatDate(this.expires); - } else { - str += '; Expires='+this.expires; - } - } +/***/ }), - if (this.maxAge != null && this.maxAge != Infinity) { - str += '; Max-Age='+this.maxAge; - } +/***/ 35216: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - if (this.domain && !this.hostOnly) { - str += '; Domain='+this.domain; - } - if (this.path) { - str += '; Path='+this.path; - } +"use strict"; - if (this.secure) { - str += '; Secure'; - } - if (this.httpOnly) { - str += '; HttpOnly'; - } - if (this.extensions) { - this.extensions.forEach(function(ext) { - str += '; '+ext; +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - } - - return str; }; - -// TTL() partially replaces the "expiry-time" parts of S5.3 step 3 (setCookie() -// elsewhere) -// S5.3 says to give the "latest representable date" for which we use Infinity -// For "expired" we use 0 -Cookie.prototype.TTL = function TTL(now) { - /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires - * attribute, the Max-Age attribute has precedence and controls the - * expiration date of the cookie. - * (Concurs with S5.3 step 3) - */ - if (this.maxAge != null) { - return this.maxAge<=0 ? 0 : this.maxAge*1000; - } - - var expires = this.expires; - if (expires != Infinity) { - if (!(expires instanceof Date)) { - expires = parseDate(expires) || Infinity; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const applicationTokenCredentialsBase_1 = __nccwpck_require__(60968); +const authConstants_1 = __nccwpck_require__(26700); +class ApplicationTokenCredentials extends applicationTokenCredentialsBase_1.ApplicationTokenCredentialsBase { + /** + * Creates a new ApplicationTokenCredentials object. + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for detailed instructions on creating an Azure Active Directory application. + * @constructor + * @param {string} clientId The active directory application client id. + * @param {string} domain The domain or tenant id containing this application. + * @param {string} secret The authentication secret for the application. + * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {Environment} [environment] The azure environment to authenticate with. + * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. + */ + constructor(clientId, domain, secret, tokenAudience, environment, tokenCache) { + if (!secret || typeof secret.valueOf() !== "string") { + throw new Error("secret must be a non empty string."); + } + super(clientId, domain, tokenAudience, environment, tokenCache); + this.secret = secret; } - - if (expires == Infinity) { - return Infinity; + /** + * Tries to get the token from cache initially. If that is unsuccessfull then it tries to get the token from ADAL. + * @returns {Promise} A promise that resolves to TokenResponse and rejects with an Error. 
+ */ + getToken() { + return __awaiter(this, void 0, void 0, function* () { + try { + const tokenResponse = yield this.getTokenFromCache(); + return tokenResponse; + } + catch (error) { + if (error.message && + error.message.startsWith(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR)) { + return Promise.reject(error); + } + const resource = this.getActiveDirectoryResourceId(); + return new Promise((resolve, reject) => { + this.authContext.acquireTokenWithClientCredentials(resource, this.clientId, this.secret, (error, tokenResponse) => { + if (error) { + return reject(error); + } + if (tokenResponse.error || tokenResponse.errorDescription) { + return reject(tokenResponse); + } + return resolve(tokenResponse); + }); + }); + } + }); } +} +exports.ApplicationTokenCredentials = ApplicationTokenCredentials; +//# sourceMappingURL=applicationTokenCredentials.js.map - return expires.getTime() - (now || Date.now()); - } - - return Infinity; -}; - -// expiryTime() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() -// elsewhere) -Cookie.prototype.expiryTime = function expiryTime(now) { - if (this.maxAge != null) { - var relativeTo = now || this.creation || new Date(); - var age = (this.maxAge <= 0) ? -Infinity : this.maxAge*1000; - return relativeTo.getTime() + age; - } - - if (this.expires == Infinity) { - return Infinity; - } - return this.expires.getTime(); -}; +/***/ }), -// expiryDate() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() -// elsewhere), except it returns a Date -Cookie.prototype.expiryDate = function expiryDate(now) { - var millisec = this.expiryTime(now); - if (millisec == Infinity) { - return new Date(MAX_TIME); - } else if (millisec == -Infinity) { - return new Date(MIN_TIME); - } else { - return new Date(millisec); - } -}; +/***/ 60968: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -// This replaces the "persistent-flag" parts of S5.3 step 3 -Cookie.prototype.isPersistent = function isPersistent() { - return (this.maxAge != null || this.expires != Infinity); -}; +"use strict"; -// Mostly S5.1.2 and S5.2.3: -Cookie.prototype.cdomain = -Cookie.prototype.canonicalizedDomain = function canonicalizedDomain() { - if (this.domain == null) { - return null; - } - return canonicalDomain(this.domain); +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - -function CookieJar(store, options) { - if (typeof options === "boolean") { - options = {rejectPublicSuffixes: options}; - } else if (options == null) { - options = {}; - } - if (options.rejectPublicSuffixes != null) { - this.rejectPublicSuffixes = options.rejectPublicSuffixes; - } - if (options.looseMode != null) { - this.enableLooseMode = options.looseMode; - } - - if (!store) { - store = new MemoryCookieStore(); - } - this.store = store; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tokenCredentialsBase_1 = __nccwpck_require__(5558); +const authConstants_1 = __nccwpck_require__(26700); +class ApplicationTokenCredentialsBase extends tokenCredentialsBase_1.TokenCredentialsBase { + /** + * Creates a new ApplicationTokenCredentials object. + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for detailed instructions on creating an Azure Active Directory application. + * @constructor + * @param {string} clientId The active directory application client id. + * @param {string} domain The domain or tenant id containing this application. + * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {Environment} [environment] The azure environment to authenticate with. + * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. + */ + constructor(clientId, domain, tokenAudience, environment, tokenCache) { + super(clientId, domain, tokenAudience, environment, tokenCache); + } + getTokenFromCache() { + const _super = Object.create(null, { + getTokenFromCache: { get: () => super.getTokenFromCache } + }); + return __awaiter(this, void 0, void 0, function* () { + const self = this; + // a thin wrapper over the base implementation. try get token from cache, additionaly clean up cache if required. + try { + const tokenResponse = yield _super.getTokenFromCache.call(this, undefined); + return Promise.resolve(tokenResponse); + } + catch (error) { + // Remove the stale token from the tokencache. ADAL gives the same error message "Entry not found in cache." + // for entry not being present in the cache and for accessToken being expired in the cache. We do not want the token cache + // to contain the expired token, we clean it up here. + const status = yield self.removeInvalidItemsFromCache({ + _clientId: self.clientId + }); + if (status.result) { + return Promise.reject(error); + } + const message = status && status.details && status.details.message + ? status.details.message + : status.details; + return Promise.reject(new Error(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR + + " : " + + "critical failure while removing expired token for service principal from token cache. " + + message)); + } + }); + } + /** + * Removes invalid items from token cache. This method is different. Here we never reject in case of error. 
+ * Rather we resolve with an object that says the result is false and error information is provided in + * the details property of the resolved object. This is done to do better error handling in the above function + * where removeInvalidItemsFromCache() is called. + * @param {object} query The query to be used for finding the token for service principal from the cache + * @returns {result: boolean, details?: Error} resultObject with more info. + */ + removeInvalidItemsFromCache(query) { + const self = this; + return new Promise(resolve => { + self.tokenCache.find(query, (error, entries) => { + if (error) { + return resolve({ result: false, details: error }); + } + if (entries && entries.length > 0) { + return new Promise(resolve => { + return self.tokenCache.remove(entries, (err) => { + if (err) { + return resolve({ result: false, details: err }); + } + return resolve({ result: true }); + }); + }); + } + else { + return resolve({ result: true }); + } + }); + }); + } } -CookieJar.prototype.store = null; -CookieJar.prototype.rejectPublicSuffixes = true; -CookieJar.prototype.enableLooseMode = false; -var CAN_BE_SYNC = []; - -CAN_BE_SYNC.push('setCookie'); -CookieJar.prototype.setCookie = function(cookie, url, options, cb) { - var err; - var context = getCookieContext(url); - if (options instanceof Function) { - cb = options; - options = {}; - } - - var host = canonicalDomain(context.hostname); - var loose = this.enableLooseMode; - if (options.loose != null) { - loose = options.loose; - } - - // S5.3 step 1 - if (!(cookie instanceof Cookie)) { - cookie = Cookie.parse(cookie, { loose: loose }); - } - if (!cookie) { - err = new Error("Cookie failed to parse"); - return cb(options.ignoreError ? null : err); - } +exports.ApplicationTokenCredentialsBase = ApplicationTokenCredentialsBase; +//# sourceMappingURL=applicationTokenCredentialsBase.js.map - // S5.3 step 2 - var now = options.now || new Date(); // will assign later to save effort in the face of errors +/***/ }), - // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie() +/***/ 38909: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - // S5.3 step 4: NOOP; domain is null by default +"use strict"; - // S5.3 step 5: public suffixes - if (this.rejectPublicSuffixes && cookie.domain) { - var suffix = pubsuffix.getPublicSuffix(cookie.cdomain()); - if (suffix == null) { // e.g. "com" - err = new Error("Cookie has domain set to a public suffix"); - return cb(options.ignoreError ? null : err); +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const ms_rest_js_1 = __nccwpck_require__(30812); +const login_1 = __nccwpck_require__(29325); +/** + * Describes the credentials by retrieving token via Azure CLI. 
+ */ +class AzureCliCredentials { + constructor(subscriptionInfo, tokenInfo, + // tslint:disable-next-line: no-inferrable-types + resource = "https://management.azure.com") { + /** + * Azure resource endpoints. + * - Defaults to Azure Resource Manager from environment: AzureCloud. "https://management.azure.com" + * - For Azure KeyVault: "https://vault.azure.net" + * - For Azure Batch: "https://batch.core.windows.net" + * - For Azure Active Directory Graph: "https://graph.windows.net" + * + * To get the resource for other clouds: + * - `az cloud list` + */ + // tslint:disable-next-line: no-inferrable-types + this.resource = "https://management.azure.com"; + /** + * The number of seconds within which it is good to renew the token. + * A constant set to 270 seconds (4.5 minutes). + */ + this._tokenRenewalMarginInSeconds = 270; + this.subscriptionInfo = subscriptionInfo; + this.tokenInfo = tokenInfo; + this.resource = resource; } - } - - // S5.3 step 6: - if (cookie.domain) { - if (!domainMatch(host, cookie.cdomain(), false)) { - err = new Error("Cookie not in this host's domain. Cookie:"+cookie.cdomain()+" Request:"+host); - return cb(options.ignoreError ? null : err); + /** + * Tries to get the new token from Azure CLI, if the token has expired or the subscription has + * changed else uses the cached accessToken. + * @return The tokenResponse (tokenType and accessToken are the two important properties). + */ + getToken() { + return __awaiter(this, void 0, void 0, function* () { + if (this._hasTokenExpired() || this._hasSubscriptionChanged() || this._hasResourceChanged()) { + try { + // refresh the access token + this.tokenInfo = yield AzureCliCredentials.getAccessToken({ + subscriptionIdOrName: this.subscriptionInfo.id, + resource: this.resource + }); + } + catch (err) { + throw new Error(`An error occurred while refreshing the new access ` + + `token:${err.stderr ? err.stderr : err.message}`); + } + } + const result = { + accessToken: this.tokenInfo.accessToken, + tokenType: this.tokenInfo.tokenType, + expiresOn: this.tokenInfo.expiresOn, + tenantId: this.tokenInfo.tenant + }; + return result; + }); } - - if (cookie.hostOnly == null) { // don't reset if already set - cookie.hostOnly = false; + /** + * Signs a request with the Authentication header. + * @param The request to be signed. + */ + signRequest(webResource) { + return __awaiter(this, void 0, void 0, function* () { + const tokenResponse = yield this.getToken(); + webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); + return Promise.resolve(webResource); + }); } - - } else { - cookie.hostOnly = true; - cookie.domain = host; - } - - //S5.2.4 If the attribute-value is empty or if the first character of the - //attribute-value is not %x2F ("/"): - //Let cookie-path be the default-path. - if (!cookie.path || cookie.path[0] !== '/') { - cookie.path = defaultPath(context.pathname); - cookie.pathIsDefault = true; - } - - // S5.3 step 8: NOOP; secure attribute - // S5.3 step 9: NOOP; httpOnly attribute - - // S5.3 step 10 - if (options.http === false && cookie.httpOnly) { - err = new Error("Cookie is HttpOnly and this isn't an HTTP API"); - return cb(options.ignoreError ? 
null : err); - } - - var store = this.store; - - if (!store.updateCookie) { - store.updateCookie = function(oldCookie, newCookie, cb) { - this.putCookie(newCookie, cb); - }; - } - - function withCookie(err, oldCookie) { - if (err) { - return cb(err); + _hasTokenExpired() { + let result = true; + const now = Math.floor(Date.now() / 1000); + if (this.tokenInfo.expiresOn && + this.tokenInfo.expiresOn instanceof Date && + Math.floor(this.tokenInfo.expiresOn.getTime() / 1000) - now > this._tokenRenewalMarginInSeconds) { + result = false; + } + return result; } - - var next = function(err) { - if (err) { - return cb(err); - } else { - cb(null, cookie); - } - }; - - if (oldCookie) { - // S5.3 step 11 - "If the cookie store contains a cookie with the same name, - // domain, and path as the newly created cookie:" - if (options.http === false && oldCookie.httpOnly) { // step 11.2 - err = new Error("old Cookie is HttpOnly and this isn't an HTTP API"); - return cb(options.ignoreError ? null : err); - } - cookie.creation = oldCookie.creation; // step 11.3 - cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker - cookie.lastAccessed = now; - // Step 11.4 (delete cookie) is implied by just setting the new one: - store.updateCookie(oldCookie, cookie, next); // step 12 - - } else { - cookie.creation = cookie.lastAccessed = now; - store.putCookie(cookie, next); // step 12 + _hasSubscriptionChanged() { + return this.subscriptionInfo.id !== this.tokenInfo.subscription; } - } - - store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie); -}; - -// RFC6365 S5.4 -CAN_BE_SYNC.push('getCookies'); -CookieJar.prototype.getCookies = function(url, options, cb) { - var context = getCookieContext(url); - if (options instanceof Function) { - cb = options; - options = {}; - } - - var host = canonicalDomain(context.hostname); - var path = context.pathname || '/'; - - var secure = options.secure; - if (secure == null && context.protocol && - (context.protocol == 'https:' || context.protocol == 'wss:')) - { - secure = true; - } - - var http = options.http; - if (http == null) { - http = true; - } - - var now = options.now || Date.now(); - var expireCheck = options.expire !== false; - var allPaths = !!options.allPaths; - var store = this.store; - - function matchingCookie(c) { - // "Either: - // The cookie's host-only-flag is true and the canonicalized - // request-host is identical to the cookie's domain. - // Or: - // The cookie's host-only-flag is false and the canonicalized - // request-host domain-matches the cookie's domain." - if (c.hostOnly) { - if (c.domain != host) { - return false; - } - } else { - if (!domainMatch(host, c.domain, false)) { - return false; - } - } - - // "The request-uri's path path-matches the cookie's path." 
- if (!allPaths && !pathMatch(path, c.path)) { - return false; + _parseToken() { + try { + const base64Url = this.tokenInfo.accessToken.split(".")[1]; + const base64 = decodeURIComponent(Buffer.from(base64Url, "base64").toString("binary").split("").map((c) => { + return "%" + ("00" + c.charCodeAt(0).toString(16)).slice(-2); + }).join("")); + return JSON.parse(base64); + } + catch (err) { + const msg = `An error occurred while parsing the access token: ${err.stack}`; + throw new Error(msg); + } } - - // "If the cookie's secure-only-flag is true, then the request-uri's - // scheme must denote a "secure" protocol" - if (c.secure && !secure) { - return false; + _isAzureResourceManagerEndpoint(newResource, currentResource) { + if (newResource.endsWith("/")) + newResource = newResource.slice(0, -1); + if (currentResource.endsWith("/")) + currentResource = currentResource.slice(0, -1); + return (newResource === "https://management.core.windows.net" && + currentResource === "https://management.azure.com") || + (newResource === "https://management.azure.com" && + currentResource === "https://management.core.windows.net"); } - - // "If the cookie's http-only-flag is true, then exclude the cookie if the - // cookie-string is being generated for a "non-HTTP" API" - if (c.httpOnly && !http) { - return false; + _hasResourceChanged() { + const parsedToken = this._parseToken(); + // normalize the resource string, since it is possible to + // provide a resource without a trailing slash + const currentResource = parsedToken.aud && parsedToken.aud.endsWith("/") + ? parsedToken.aud.slice(0, -1) + : parsedToken.aud; + const newResource = this.resource.endsWith("/") + ? this.resource.slice(0, -1) + : this.resource; + const result = this._isAzureResourceManagerEndpoint(newResource, currentResource) + ? false + : currentResource !== newResource; + return result; } - - // deferred from S5.3 - // non-RFC: allow retention of expired cookies by choice - if (expireCheck && c.expiryTime() <= now) { - store.removeCookie(c.domain, c.path, c.key, function(){}); // result ignored - return false; + /** + * Gets the access token for the default or specified subscription. + * @param options Optional parameters that can be provided to get the access token. + */ + static getAccessToken(options = {}) { + return __awaiter(this, void 0, void 0, function* () { + try { + let cmd = "account get-access-token"; + if (options.subscriptionIdOrName) { + cmd += ` -s "${options.subscriptionIdOrName}"`; + } + if (options.resource) { + cmd += ` --resource ${options.resource}`; + } + const result = yield login_1.execAz(cmd); + result.expiresOn = new Date(result.expiresOn); + return result; + } + catch (err) { + const message = `An error occurred while getting credentials from ` + + `Azure CLI: ${err.stack}`; + throw new Error(message); + } + }); } - - return true; - } - - store.findCookies(host, allPaths ? null : path, function(err,cookies) { - if (err) { - return cb(err); + /** + * Gets the subscription from Azure CLI. + * @param subscriptionIdOrName - The name or id of the subscription for which the information is + * required. 
+ */ + static getSubscription(subscriptionIdOrName) { + return __awaiter(this, void 0, void 0, function* () { + if (subscriptionIdOrName && (typeof subscriptionIdOrName !== "string" || !subscriptionIdOrName.length)) { + throw new Error("'subscriptionIdOrName' must be a non-empty string."); + } + try { + let cmd = "account show"; + if (subscriptionIdOrName) { + cmd += ` -s "${subscriptionIdOrName}"`; + } + const result = yield login_1.execAz(cmd); + return result; + } + catch (err) { + const message = `An error occurred while getting information about the current subscription from ` + + `Azure CLI: ${err.stack}`; + throw new Error(message); + } + }); } - - cookies = cookies.filter(matchingCookie); - - // sorting of S5.4 part 2 - if (options.sort !== false) { - cookies = cookies.sort(cookieCompare); + /** + * Sets the specified subscription as the default subscription for Azure CLI. + * @param subscriptionIdOrName The name or id of the subsciption that needs to be set as the + * default subscription. + */ + static setDefaultSubscription(subscriptionIdOrName) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield login_1.execAz(`account set -s ${subscriptionIdOrName}`); + } + catch (err) { + const message = `An error occurred while setting the current subscription from ` + + `Azure CLI: ${err.stack}`; + throw new Error(message); + } + }); } - - // S5.4 part 3 - var now = new Date(); - cookies.forEach(function(c) { - c.lastAccessed = now; - }); - // TODO persist lastAccessed - - cb(null,cookies); - }); -}; - -CAN_BE_SYNC.push('getCookieString'); -CookieJar.prototype.getCookieString = function(/*..., cb*/) { - var args = Array.prototype.slice.call(arguments,0); - var cb = args.pop(); - var next = function(err,cookies) { - if (err) { - cb(err); - } else { - cb(null, cookies - .sort(cookieCompare) - .map(function(c){ - return c.cookieString(); - }) - .join('; ')); + /** + * Returns a list of all the subscriptions from Azure CLI. + * @param options Optional parameters that can be provided while listing all the subcriptions. + */ + static listAllSubscriptions(options = {}) { + return __awaiter(this, void 0, void 0, function* () { + let subscriptionList = []; + try { + let cmd = "account list"; + if (options.all) { + cmd += " --all"; + } + if (options.refresh) { + cmd += "--refresh"; + } + subscriptionList = yield login_1.execAz(cmd); + if (subscriptionList && subscriptionList.length) { + for (const sub of subscriptionList) { + if (sub.cloudName) { + sub.environmentName = sub.cloudName; + delete sub.cloudName; + } + } + } + return subscriptionList; + } + catch (err) { + const message = `An error occurred while getting a list of all the subscription from ` + + `Azure CLI: ${err.stack}`; + throw new Error(message); + } + }); } - }; - args.push(next); - this.getCookies.apply(this,args); -}; - -CAN_BE_SYNC.push('getSetCookieStrings'); -CookieJar.prototype.getSetCookieStrings = function(/*..., cb*/) { - var args = Array.prototype.slice.call(arguments,0); - var cb = args.pop(); - var next = function(err,cookies) { - if (err) { - cb(err); - } else { - cb(null, cookies.map(function(c){ - return c.toString(); - })); + /** + * Provides credentials that can be used by the JS SDK to interact with Azure via azure cli. + * **Pre-requisite** + * - **install azure-cli** . 
For more information see + * {@link https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest Install Azure CLI} + * - **login via `az login`** + * @param options - Optional parameters that can be provided while creating AzureCliCredentials. + */ + static create(options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const [subscriptinInfo, accessToken] = yield Promise.all([ + AzureCliCredentials.getSubscription(options.subscriptionIdOrName), + AzureCliCredentials.getAccessToken(options) + ]); + return new AzureCliCredentials(subscriptinInfo, accessToken, options.resource); + }); } - }; - args.push(next); - this.getCookies.apply(this,args); -}; - -CAN_BE_SYNC.push('serialize'); -CookieJar.prototype.serialize = function(cb) { - var type = this.store.constructor.name; - if (type === 'Object') { - type = null; - } - - // update README.md "Serialization Format" if you change this, please! - var serialized = { - // The version of tough-cookie that serialized this jar. Generally a good - // practice since future versions can make data import decisions based on - // known past behavior. When/if this matters, use `semver`. - version: 'tough-cookie@'+VERSION, - - // add the store type, to make humans happy: - storeType: type, +} +exports.AzureCliCredentials = AzureCliCredentials; +//# sourceMappingURL=azureCliCredentials.js.map - // CookieJar configuration: - rejectPublicSuffixes: !!this.rejectPublicSuffixes, +/***/ }), - // this gets filled from getAllCookies: - cookies: [] - }; +/***/ 80450: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (!(this.store.getAllCookies && - typeof this.store.getAllCookies === 'function')) - { - return cb(new Error('store does not support getAllCookies and cannot be serialized')); - } +"use strict"; - this.store.getAllCookies(function(err,cookies) { - if (err) { - return cb(err); +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tokenCredentialsBase_1 = __nccwpck_require__(5558); +const authConstants_1 = __nccwpck_require__(26700); +class DeviceTokenCredentials extends tokenCredentialsBase_1.TokenCredentialsBase { + /** + * Creates a new DeviceTokenCredentials object that gets a new access token using userCodeInfo (contains user_code, device_code) + * for authenticating user on device. + * + * When this credential is used, the script will provide a url and code. The user needs to copy the url and the code, paste it + * in a browser and authenticate over there. If successful, the script will get the access token. + * + * @constructor + * @param {string} [clientId] The active directory application client id. + * @param {string} [domain] The domain or tenant id containing this application. Default value is "common" + * @param {string} [username] The user name for account in the form: "user@example.com". + * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. 
+ * @param {Environment} [environment] The azure environment to authenticate with. Default environment is "Azure" popularly known as "Public Azure Cloud". + * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. + */ + constructor(clientId, domain, username, tokenAudience, environment, tokenCache) { + if (!username) { + username = "user@example.com"; + } + if (!domain) { + domain = authConstants_1.AuthConstants.AAD_COMMON_TENANT; + } + if (!clientId) { + clientId = authConstants_1.AuthConstants.DEFAULT_ADAL_CLIENT_ID; + } + super(clientId, domain, tokenAudience, environment, tokenCache); + this.username = username; } + getToken() { + // For device auth, this is just getTokenFromCache. + return this.getTokenFromCache(this.username); + } +} +exports.DeviceTokenCredentials = DeviceTokenCredentials; +//# sourceMappingURL=deviceTokenCredentials.js.map - serialized.cookies = cookies.map(function(cookie) { - // convert to serialized 'raw' cookies - cookie = (cookie instanceof Cookie) ? cookie.toJSON() : cookie; - - // Remove the index so new ones get assigned during deserialization - delete cookie.creationIndex; - - return cookie; - }); - - return cb(null, serialized); - }); -}; +/***/ }), -// well-known name that JSON.stringify calls -CookieJar.prototype.toJSON = function() { - return this.serializeSync(); -}; +/***/ 23699: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// use the class method CookieJar.deserialize instead of calling this directly -CAN_BE_SYNC.push('_importCookies'); -CookieJar.prototype._importCookies = function(serialized, cb) { - var jar = this; - var cookies = serialized.cookies; - if (!cookies || !Array.isArray(cookies)) { - return cb(new Error('serialized jar has no cookies array')); - } - cookies = cookies.slice(); // do not modify the original +"use strict"; - function putNext(err) { - if (err) { - return cb(err); +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +const applicationTokenCredentials_1 = __nccwpck_require__(35216); +const applicationTokenCertificateCredentials_1 = __nccwpck_require__(2265); +const deviceTokenCredentials_1 = __nccwpck_require__(80450); +const msiAppServiceTokenCredentials_1 = __nccwpck_require__(68376); +const msiTokenCredentials_1 = __nccwpck_require__(6182); +const msiVmTokenCredentials_1 = __nccwpck_require__(73287); +const tokenCredentialsBase_1 = __nccwpck_require__(5558); +const userTokenCredentials_1 = __nccwpck_require__(14502); +const adal_node_1 = __nccwpck_require__(35894); +function createAuthenticator(credentials) { + const convertedCredentials = _convert(credentials); + const authenticator = _createAuthenticatorMapper(convertedCredentials); + return authenticator; +} +exports.createAuthenticator = createAuthenticator; +function _convert(credentials) { + if (credentials instanceof msiAppServiceTokenCredentials_1.MSIAppServiceTokenCredentials) { + return new msiAppServiceTokenCredentials_1.MSIAppServiceTokenCredentials({ + msiEndpoint: credentials.msiEndpoint, + msiSecret: credentials.msiSecret, + msiApiVersion: credentials.msiApiVersion, + resource: credentials.resource + }); } - - if (!cookies.length) { - return cb(err, jar); + else if (credentials instanceof msiVmTokenCredentials_1.MSIVmTokenCredentials) { + return new msiVmTokenCredentials_1.MSIVmTokenCredentials({ + resource: credentials.resource, + msiEndpoint: credentials.msiEndpoint + }); } - - var cookie; - try { - cookie = fromJSON(cookies.shift()); - } catch (e) { - return cb(e); + else if (credentials instanceof msiTokenCredentials_1.MSITokenCredentials) { + throw new Error("MSI-credentials not one of: MSIVmTokenCredentials, MSIAppServiceTokenCredentials"); } - - if (cookie === null) { - return putNext(null); // skip this cookie + else { + return credentials; } +} +function _createAuthenticatorMapper(credentials) { + return (challenge) => new Promise((resolve, reject) => { + // Function to take token Response and format a authorization value + const _formAuthorizationValue = (err, tokenResponse) => { + if (err) { + return reject(err); + } + if (tokenResponse.error) { + return reject(tokenResponse.error); + } + tokenResponse = tokenResponse; + // Calculate the value to be set in the request's Authorization header and resume the call. + const authorizationValue = tokenResponse.tokenType + " " + tokenResponse.accessToken; + return resolve(authorizationValue); + }; + // Create a new authentication context. 
+ if (credentials instanceof tokenCredentialsBase_1.TokenCredentialsBase) { + const context = new adal_node_1.AuthenticationContext(challenge.authorization, true, credentials.authContext && credentials.authContext.cache); + if (credentials instanceof applicationTokenCredentials_1.ApplicationTokenCredentials) { + return context.acquireTokenWithClientCredentials(challenge.resource, credentials.clientId, credentials.secret, _formAuthorizationValue); + } + else if (credentials instanceof applicationTokenCertificateCredentials_1.ApplicationTokenCertificateCredentials) { + return context.acquireTokenWithClientCertificate(challenge.resource, credentials.clientId, credentials.certificate, credentials.thumbprint, _formAuthorizationValue); + } + else if (credentials instanceof userTokenCredentials_1.UserTokenCredentials) { + return context.acquireTokenWithUsernamePassword(challenge.resource, credentials.username, credentials.password, credentials.clientId, _formAuthorizationValue); + } + else if (credentials instanceof deviceTokenCredentials_1.DeviceTokenCredentials) { + return context.acquireToken(challenge.resource, credentials.username, credentials.clientId, _formAuthorizationValue); + } + } + else if (credentials instanceof msiTokenCredentials_1.MSITokenCredentials) { + return credentials.getToken(); + } + else { + return reject(new Error("credentials must be one of: ApplicationTokenCredentials, UserTokenCredentials, " + + "DeviceTokenCredentials, MSITokenCredentials")); + } + }); +} +//# sourceMappingURL=keyVaultFactory.js.map - jar.store.putCookie(cookie, putNext); - } +/***/ }), - putNext(); -}; +/***/ 68376: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -CookieJar.deserialize = function(strOrObj, store, cb) { - if (arguments.length !== 3) { - // store is optional - cb = store; - store = null; - } +"use strict"; - var serialized; - if (typeof strOrObj === 'string') { - serialized = jsonParse(strOrObj); - if (serialized instanceof Error) { - return cb(serialized); +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const msiTokenCredentials_1 = __nccwpck_require__(6182); +const ms_rest_js_1 = __nccwpck_require__(30812); +/** + * @class MSIAppServiceTokenCredentials + */ +class MSIAppServiceTokenCredentials extends msiTokenCredentials_1.MSITokenCredentials { + /** + * Creates an instance of MSIAppServiceTokenCredentials. + * @param {string} [options.msiEndpoint] - The local URL from which your app can request tokens. + * Either provide this parameter or set the environment variable `MSI_ENDPOINT`. + * For example: `MSI_ENDPOINT="http://127.0.0.1:41741/MSI/token/"` + * @param {string} [options.msiSecret] - The secret used in communication between your code and the local MSI agent. 
+ * Either provide this parameter or set the environment variable `MSI_SECRET`. + * For example: `MSI_SECRET="69418689F1E342DD946CB82994CDA3CB"` + * @param {string} [options.resource] - The resource uri or token audience for which the token is needed. + * For e.g. it can be: + * - resource management endpoint "https://management.azure.com/" (default) + * - management endpoint "https://management.core.windows.net/" + * @param {string} [options.msiApiVersion] - The api-version of the local MSI agent. Default value is "2017-09-01". + */ + constructor(options) { + if (!options) + options = {}; + super(options); + options.msiEndpoint = options.msiEndpoint || process.env["MSI_ENDPOINT"]; + options.msiSecret = options.msiSecret || process.env["MSI_SECRET"]; + if (!options.msiEndpoint || (options.msiEndpoint && typeof options.msiEndpoint.valueOf() !== "string")) { + throw new Error('Either provide "msiEndpoint" as a property of the "options" object ' + + 'or set the environment variable "MSI_ENDPOINT" and it must be of type "string".'); + } + if (!options.msiSecret || (options.msiSecret && typeof options.msiSecret.valueOf() !== "string")) { + throw new Error('Either provide "msiSecret" as a property of the "options" object ' + + 'or set the environment variable "MSI_SECRET" and it must be of type "string".'); + } + if (!options.msiApiVersion) { + options.msiApiVersion = "2017-09-01"; + } + else if (typeof options.msiApiVersion.valueOf() !== "string") { + throw new Error("msiApiVersion must be a uri."); + } + this.msiEndpoint = options.msiEndpoint; + this.msiSecret = options.msiSecret; + this.msiApiVersion = options.msiApiVersion; } - } else { - serialized = strOrObj; - } - - var jar = new CookieJar(store, serialized.rejectPublicSuffixes); - jar._importCookies(serialized, function(err) { - if (err) { - return cb(err); + /** + * Prepares and sends a GET request to a service endpoint indicated by the app service, which responds with the access token. + * @return {Promise} Promise with the tokenResponse (tokenType and accessToken are the two important properties). + */ + getToken() { + return __awaiter(this, void 0, void 0, function* () { + const reqOptions = this.prepareRequestOptions(); + let opRes; + let result; + opRes = yield this._httpClient.sendRequest(reqOptions); + if (opRes.bodyAsText === undefined || opRes.bodyAsText.indexOf("ExceptionMessage") !== -1) { + throw new Error(`MSI: Failed to retrieve a token from "${reqOptions.url}" with an error: ${opRes.bodyAsText}`); + } + result = this.parseTokenResponse(opRes.bodyAsText); + if (!result.tokenType) { + throw new Error(`Invalid token response, did not find tokenType. Response body is: ${opRes.bodyAsText}`); + } + else if (!result.accessToken) { + throw new Error(`Invalid token response, did not find accessToken. Response body is: ${opRes.bodyAsText}`); + } + return result; + }); } - cb(null, jar); - }); -}; - -CookieJar.deserializeSync = function(strOrObj, store) { - var serialized = typeof strOrObj === 'string' ? - JSON.parse(strOrObj) : strOrObj; - var jar = new CookieJar(store, serialized.rejectPublicSuffixes); + prepareRequestOptions() { + const endpoint = this.msiEndpoint.endsWith("/") ? 
this.msiEndpoint : `${this.msiEndpoint}/`; + const resource = encodeURIComponent(this.resource); + const getUrl = `${endpoint}?resource=${resource}&api-version=${this.msiApiVersion}`; + const reqOptions = { + url: getUrl, + headers: { + "secret": this.msiSecret + }, + method: "GET" + }; + const webResource = new ms_rest_js_1.WebResource(); + return webResource.prepare(reqOptions); + } +} +exports.MSIAppServiceTokenCredentials = MSIAppServiceTokenCredentials; +//# sourceMappingURL=msiAppServiceTokenCredentials.js.map - // catch this mistake early: - if (!jar.store.synchronous) { - throw new Error('CookieJar store is not synchronous; use async API instead.'); - } +/***/ }), - jar._importCookiesSync(serialized); - return jar; -}; -CookieJar.fromJSON = CookieJar.deserializeSync; +/***/ 6182: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -CookieJar.prototype.clone = function(newStore, cb) { - if (arguments.length === 1) { - cb = newStore; - newStore = null; - } +"use strict"; - this.serialize(function(err,serialized) { - if (err) { - return cb(err); - } - CookieJar.deserialize(serialized, newStore, cb); - }); +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - -CAN_BE_SYNC.push('removeAllCookies'); -CookieJar.prototype.removeAllCookies = function(cb) { - var store = this.store; - - // Check that the store implements its own removeAllCookies(). The default - // implementation in Store will immediately call the callback with a "not - // implemented" Error. - if (store.removeAllCookies instanceof Function && - store.removeAllCookies !== Store.prototype.removeAllCookies) - { - return store.removeAllCookies(cb); - } - - store.getAllCookies(function(err, cookies) { - if (err) { - return cb(err); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const ms_rest_js_1 = __nccwpck_require__(30812); +const authConstants_1 = __nccwpck_require__(26700); +/** + * @class MSITokenCredentials - Provides information about managed service identity token credentials. + * This object can only be used to acquire token on a virtual machine provisioned in Azure with managed service identity. + */ +class MSITokenCredentials { + /** + * Creates an instance of MSITokenCredentials. + * @param {object} [options] - Optional parameters + * @param {string} [options.resource] - The resource uri or token audience for which the token is needed. + * For e.g. 
it can be: + * - resource management endpoint "https://management.azure.com/"(default) + * - management endpoint "https://management.core.windows.net/" + */ + constructor(options) { + if (!options) + options = {}; + if (!options.resource) { + options.resource = authConstants_1.AuthConstants.RESOURCE_MANAGER_ENDPOINT; + } + else if (typeof options.resource.valueOf() !== "string") { + throw new Error("resource must be a uri."); + } + this.resource = options.resource; + this._httpClient = options.httpClient || new ms_rest_js_1.DefaultHttpClient(); } - - if (cookies.length === 0) { - return cb(null); + /** + * Parses a tokenResponse json string into a object, and converts properties on the first level to camelCase. + * This method tries to standardize the tokenResponse + * @param {string} body A json string + * @return {object} [tokenResponse] The tokenResponse (tokenType and accessToken are the two important properties). + */ + parseTokenResponse(body) { + // Docs show different examples of possible MSI responses for different services. https://docs.microsoft.com/en-us/azure/active-directory/managed-service-identity/overview + // expires_on - is a Date like string in this doc + // - https://docs.microsoft.com/en-us/azure/app-service/app-service-managed-service-identity#rest-protocol-examples + // In other doc it is stringified number. + // - https://docs.microsoft.com/en-us/azure/active-directory/managed-service-identity/tutorial-linux-vm-access-arm#get-an-access-token-using-the-vms-identity-and-use-it-to-call-resource-manager + const parsedBody = JSON.parse(body); + parsedBody.accessToken = parsedBody["access_token"]; + delete parsedBody["access_token"]; + parsedBody.tokenType = parsedBody["token_type"]; + delete parsedBody["token_type"]; + if (parsedBody["refresh_token"]) { + parsedBody.refreshToken = parsedBody["refresh_token"]; + delete parsedBody["refresh_token"]; + } + if (parsedBody["expires_in"]) { + parsedBody.expiresIn = parsedBody["expires_in"]; + if (typeof parsedBody["expires_in"] === "string") { + // normal number as a string '1504130527' + parsedBody.expiresIn = parseInt(parsedBody["expires_in"], 10); + } + delete parsedBody["expires_in"]; + } + if (parsedBody["not_before"]) { + parsedBody.notBefore = parsedBody["not_before"]; + if (typeof parsedBody["not_before"] === "string") { + // normal number as a string '1504130527' + parsedBody.notBefore = parseInt(parsedBody["not_before"], 10); + } + delete parsedBody["not_before"]; + } + if (parsedBody["expires_on"]) { + parsedBody.expiresOn = parsedBody["expires_on"]; + if (typeof parsedBody["expires_on"] === "string") { + // possibly a Date string '09/14/2017 00:00:00 PM +00:00' + if (parsedBody["expires_on"].includes(":") || parsedBody["expires_on"].includes("/")) { + parsedBody.expiresOn = new Date(parsedBody["expires_on"], 10); + } + else { + // normal number as a string '1504130527' + parsedBody.expiresOn = new Date(parseInt(parsedBody["expires_on"], 10)); + } + } + delete parsedBody["expires_on"]; + } + return parsedBody; } + /** + * Signs a request with the Authentication header. + * + * @param {webResource} The WebResource to be signed. + * @return {Promise} Promise with signed WebResource. 
+ */ + signRequest(webResource) { + return __awaiter(this, void 0, void 0, function* () { + const tokenResponse = yield this.getToken(); + webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); + return Promise.resolve(webResource); + }); + } +} +exports.MSITokenCredentials = MSITokenCredentials; +//# sourceMappingURL=msiTokenCredentials.js.map - var completedCount = 0; - var removeErrors = []; - - function removeCookieCb(removeErr) { - if (removeErr) { - removeErrors.push(removeErr); - } +/***/ }), - completedCount++; +/***/ 73287: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - if (completedCount === cookies.length) { - return cb(removeErrors.length ? removeErrors[0] : null); - } - } +"use strict"; - cookies.forEach(function(cookie) { - store.removeCookie(cookie.domain, cookie.path, cookie.key, removeCookieCb); +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - }); -}; - -CookieJar.prototype._cloneSync = syncWrap('clone'); -CookieJar.prototype.cloneSync = function(newStore) { - if (!newStore.synchronous) { - throw new Error('CookieJar clone destination store is not synchronous; use async API instead.'); - } - return this._cloneSync(newStore); }; - -// Use a closure to provide a true imperative API for synchronous stores. 
-function syncWrap(method) { - return function() { - if (!this.store.synchronous) { - throw new Error('CookieJar store is not synchronous; use async API instead.'); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const msiTokenCredentials_1 = __nccwpck_require__(6182); +const ms_rest_js_1 = __nccwpck_require__(30812); +/** + * @class MSIVmTokenCredentials + */ +class MSIVmTokenCredentials extends msiTokenCredentials_1.MSITokenCredentials { + constructor(options) { + if (!options) + options = {}; + super(options); + if (!options.msiEndpoint) { + options.msiEndpoint = "http://169.254.169.254/metadata/identity/oauth2/token"; + } + else if (typeof options.msiEndpoint !== "string") { + throw new Error("msiEndpoint must be a string."); + } + const urlBuilder = ms_rest_js_1.URLBuilder.parse(options.msiEndpoint); + if (!urlBuilder.getScheme()) { + options.msiEndpoint = `http://${options.msiEndpoint}`; + } + if (!options.apiVersion) { + options.apiVersion = "2018-02-01"; + } + else if (typeof options.apiVersion !== "string") { + throw new Error("apiVersion must be a string."); + } + if (!options.httpMethod) { + options.httpMethod = "GET"; + } + this.apiVersion = options.apiVersion; + this.msiEndpoint = options.msiEndpoint; + this.httpMethod = options.httpMethod; + this.objectId = options.objectId; + this.clientId = options.clientId; + this.identityId = options.identityId; } - - var args = Array.prototype.slice.call(arguments); - var syncErr, syncResult; - args.push(function syncCb(err, result) { - syncErr = err; - syncResult = result; - }); - this[method].apply(this, args); - - if (syncErr) { - throw syncErr; + /** + * Prepares and sends a POST request to a service endpoint hosted on the Azure VM, which responds with the access token. + * @return {Promise} Promise with the tokenResponse (tokenType and accessToken are the two important properties). + */ + getToken() { + return __awaiter(this, void 0, void 0, function* () { + const reqOptions = this.prepareRequestOptions(); + let opRes; + let result; + opRes = yield this._httpClient.sendRequest(reqOptions); + result = this.parseTokenResponse(opRes.bodyAsText); + if (!result.tokenType) { + throw new Error(`Invalid token response, did not find tokenType. Response body is: ${opRes.bodyAsText}`); + } + else if (!result.accessToken) { + throw new Error(`Invalid token response, did not find accessToken. 
Response body is: ${opRes.bodyAsText}`); + } + return result; + }); + } + prepareRequestOptions() { + const reqOptions = { + url: this.msiEndpoint, + headers: { + "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", + "Metadata": "true" + }, + method: this.httpMethod, + queryParameters: { + "api-version": this.apiVersion, + "resource": this.resource, + "object_id": this.objectId, + "client_id": this.clientId, + "mi_res_id": this.identityId + } + }; + const webResource = new ms_rest_js_1.WebResource(); + return webResource.prepare(reqOptions); } - return syncResult; - }; } - -// wrap all declared CAN_BE_SYNC methods in the sync wrapper -CAN_BE_SYNC.forEach(function(method) { - CookieJar.prototype[method+'Sync'] = syncWrap(method); -}); - -exports.version = VERSION; -exports.CookieJar = CookieJar; -exports.Cookie = Cookie; -exports.Store = Store; -exports.MemoryCookieStore = MemoryCookieStore; -exports.parseDate = parseDate; -exports.formatDate = formatDate; -exports.parse = parse; -exports.fromJSON = fromJSON; -exports.domainMatch = domainMatch; -exports.defaultPath = defaultPath; -exports.pathMatch = pathMatch; -exports.getPublicSuffix = pubsuffix.getPublicSuffix; -exports.cookieCompare = cookieCompare; -exports.permuteDomain = __nccwpck_require__(91287).permuteDomain; -exports.permutePath = permutePath; -exports.canonicalDomain = canonicalDomain; - +exports.MSIVmTokenCredentials = MSIVmTokenCredentials; +//# sourceMappingURL=msiVmTokenCredentials.js.map /***/ }), -/***/ 96868: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5558: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -/*! - * Copyright (c) 2015, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- */ - -var Store = __nccwpck_require__(5340)/* .Store */ .y; -var permuteDomain = __nccwpck_require__(91287).permuteDomain; -var pathMatch = __nccwpck_require__(55319)/* .pathMatch */ .U; -var util = __nccwpck_require__(31669); -function MemoryCookieStore() { - Store.call(this); - this.idx = {}; +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const ms_rest_js_1 = __nccwpck_require__(30812); +const ms_rest_azure_env_1 = __nccwpck_require__(40787); +const adal_node_1 = __nccwpck_require__(35894); +class TokenCredentialsBase { + constructor(clientId, domain, tokenAudience, environment = ms_rest_azure_env_1.Environment.AzureCloud, tokenCache = new adal_node_1.MemoryCache()) { + this.clientId = clientId; + this.domain = domain; + this.tokenAudience = tokenAudience; + this.environment = environment; + this.tokenCache = tokenCache; + if (!clientId || typeof clientId.valueOf() !== "string") { + throw new Error("clientId must be a non empty string."); + } + if (!domain || typeof domain.valueOf() !== "string") { + throw new Error("domain must be a non empty string."); + } + if (this.tokenAudience === "graph" && this.domain.toLowerCase() === "common") { + throw new Error(`${"If the tokenAudience is specified as \"graph\" then \"domain\" cannot be defaulted to \"commmon\" tenant.\ + It must be the actual tenant (preferrably a string in a guid format)."}`); + } + const authorityUrl = this.environment.activeDirectoryEndpointUrl + this.domain; + this.authContext = new adal_node_1.AuthenticationContext(authorityUrl, this.environment.validateAuthority, this.tokenCache); + } + getActiveDirectoryResourceId() { + let resource = this.environment.activeDirectoryResourceId; + if (this.tokenAudience) { + resource = this.tokenAudience; + if (this.tokenAudience.toLowerCase() === "graph") { + resource = this.environment.activeDirectoryGraphResourceId; + } + else if (this.tokenAudience.toLowerCase() === "batch") { + resource = this.environment.batchResourceId; + } + } + return resource; + } + getTokenFromCache(username) { + const self = this; + const resource = this.getActiveDirectoryResourceId(); + return new Promise((resolve, reject) => { + self.authContext.acquireToken(resource, username, self.clientId, (error, tokenResponse) => { + if (error) { + return reject(error); + } + if (tokenResponse.error || tokenResponse.errorDescription) { + return reject(tokenResponse); + } + return resolve(tokenResponse); + }); + }); + } + /** + * Signs a request with the Authentication header. + * + * @param {webResource} The WebResource to be signed. + * @param {function(error)} callback The callback function. 
+ * @return {undefined} + */ + signRequest(webResource) { + return __awaiter(this, void 0, void 0, function* () { + const tokenResponse = yield this.getToken(); + webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); + return Promise.resolve(webResource); + }); + } } -util.inherits(MemoryCookieStore, Store); -exports.m = MemoryCookieStore; -MemoryCookieStore.prototype.idx = null; +exports.TokenCredentialsBase = TokenCredentialsBase; +//# sourceMappingURL=tokenCredentialsBase.js.map -// Since it's just a struct in RAM, this Store is synchronous -MemoryCookieStore.prototype.synchronous = true; +/***/ }), -// force a default depth: -MemoryCookieStore.prototype.inspect = function() { - return "{ idx: "+util.inspect(this.idx, false, 2)+' }'; -}; - -// Use the new custom inspection symbol to add the custom inspect function if -// available. -if (util.inspect.custom) { - MemoryCookieStore.prototype[util.inspect.custom] = MemoryCookieStore.prototype.inspect; -} - -MemoryCookieStore.prototype.findCookie = function(domain, path, key, cb) { - if (!this.idx[domain]) { - return cb(null,undefined); - } - if (!this.idx[domain][path]) { - return cb(null,undefined); - } - return cb(null,this.idx[domain][path][key]||null); -}; - -MemoryCookieStore.prototype.findCookies = function(domain, path, cb) { - var results = []; - if (!domain) { - return cb(null,[]); - } - - var pathMatcher; - if (!path) { - // null means "all paths" - pathMatcher = function matchAll(domainIndex) { - for (var curPath in domainIndex) { - var pathIndex = domainIndex[curPath]; - for (var key in pathIndex) { - results.push(pathIndex[key]); - } - } - }; - - } else { - pathMatcher = function matchRFC(domainIndex) { - //NOTE: we should use path-match algorithm from S5.1.4 here - //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299) - Object.keys(domainIndex).forEach(function (cookiePath) { - if (pathMatch(path, cookiePath)) { - var pathIndex = domainIndex[cookiePath]; - - for (var key in pathIndex) { - results.push(pathIndex[key]); - } - } - }); - }; - } - - var domains = permuteDomain(domain) || [domain]; - var idx = this.idx; - domains.forEach(function(curDomain) { - var domainIndex = idx[curDomain]; - if (!domainIndex) { - return; - } - pathMatcher(domainIndex); - }); - - cb(null,results); -}; - -MemoryCookieStore.prototype.putCookie = function(cookie, cb) { - if (!this.idx[cookie.domain]) { - this.idx[cookie.domain] = {}; - } - if (!this.idx[cookie.domain][cookie.path]) { - this.idx[cookie.domain][cookie.path] = {}; - } - this.idx[cookie.domain][cookie.path][cookie.key] = cookie; - cb(null); -}; - -MemoryCookieStore.prototype.updateCookie = function(oldCookie, newCookie, cb) { - // updateCookie() may avoid updating cookies that are identical. For example, - // lastAccessed may not be important to some stores and an equality - // comparison could exclude that field. 
- this.putCookie(newCookie,cb); -}; - -MemoryCookieStore.prototype.removeCookie = function(domain, path, key, cb) { - if (this.idx[domain] && this.idx[domain][path] && this.idx[domain][path][key]) { - delete this.idx[domain][path][key]; - } - cb(null); -}; - -MemoryCookieStore.prototype.removeCookies = function(domain, path, cb) { - if (this.idx[domain]) { - if (path) { - delete this.idx[domain][path]; - } else { - delete this.idx[domain]; - } - } - return cb(null); -}; - -MemoryCookieStore.prototype.removeAllCookies = function(cb) { - this.idx = {}; - return cb(null); -} - -MemoryCookieStore.prototype.getAllCookies = function(cb) { - var cookies = []; - var idx = this.idx; - - var domains = Object.keys(idx); - domains.forEach(function(domain) { - var paths = Object.keys(idx[domain]); - paths.forEach(function(path) { - var keys = Object.keys(idx[domain][path]); - keys.forEach(function(key) { - if (key !== null) { - cookies.push(idx[domain][path][key]); - } - }); - }); - }); - - // Sort by creationIndex so deserializing retains the creation order. - // When implementing your own store, this SHOULD retain the order too - cookies.sort(function(a,b) { - return (a.creationIndex||0) - (b.creationIndex||0); - }); - - cb(null, cookies); -}; - - -/***/ }), - -/***/ 55319: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -/*! - * Copyright (c) 2015, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -/* - * "A request-path path-matches a given cookie-path if at least one of the - * following conditions holds:" - */ -function pathMatch (reqPath, cookiePath) { - // "o The cookie-path and the request-path are identical." - if (cookiePath === reqPath) { - return true; - } - - var idx = reqPath.indexOf(cookiePath); - if (idx === 0) { - // "o The cookie-path is a prefix of the request-path, and the last - // character of the cookie-path is %x2F ("/")." 
- if (cookiePath.substr(-1) === "/") { - return true; - } - - // " o The cookie-path is a prefix of the request-path, and the first - // character of the request-path that is not included in the cookie- path - // is a %x2F ("/") character." - if (reqPath.substr(cookiePath.length, 1) === "/") { - return true; - } - } - - return false; -} - -exports.U = pathMatch; - - -/***/ }), - -/***/ 91287: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; -/*! - * Copyright (c) 2015, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -var pubsuffix = __nccwpck_require__(23005); - -// Gives the permutation of all possible domainMatch()es of a given domain. The -// array is in shortest-to-longest order. Handy for indexing. -function permuteDomain (domain) { - var pubSuf = pubsuffix.getPublicSuffix(domain); - if (!pubSuf) { - return null; - } - if (pubSuf == domain) { - return [domain]; - } - - var prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com" - var parts = prefix.split('.').reverse(); - var cur = pubSuf; - var permutations = [cur]; - while (parts.length) { - cur = parts.shift() + '.' + cur; - permutations.push(cur); - } - return permutations; -} - -exports.permuteDomain = permuteDomain; - - -/***/ }), - -/***/ 23005: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; -/*! - * Copyright (c) 2018, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. 
Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -var psl = __nccwpck_require__(29975); - -function getPublicSuffix(domain) { - return psl.get(domain); -} - -exports.getPublicSuffix = getPublicSuffix; - - -/***/ }), - -/***/ 5340: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -/*! - * Copyright (c) 2015, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -/*jshint unused:false */ - -function Store() { -} -exports.y = Store; - -// Stores may be synchronous, but are still required to use a -// Continuation-Passing Style API. The CookieJar itself will expose a "*Sync" -// API that converts from synchronous-callbacks to imperative style. 
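// Editor's illustrative sketch -- not part of the bundled tough-cookie source.
// The comment above describes the continuation-passing-style Store contract; the
// snippet below shows it from the consumer side, assuming tough-cookie's public
// exports (CookieJar, MemoryCookieStore) seen earlier in this bundle.
var tough = require('tough-cookie');
var exampleJar = new tough.CookieJar(new tough.MemoryCookieStore());
exampleJar.setCookie('foo=bar; Domain=example.com; Path=/', 'http://example.com/', function (err) {
  if (err) throw err;
  // The "*Sync" wrappers are usable here only because MemoryCookieStore sets synchronous = true.
  var cookies = exampleJar.getCookiesSync('http://example.com/');
  console.log(cookies.map(function (c) { return c.key; })); // ['foo']
});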
-Store.prototype.synchronous = false; - -Store.prototype.findCookie = function(domain, path, key, cb) { - throw new Error('findCookie is not implemented'); -}; - -Store.prototype.findCookies = function(domain, path, cb) { - throw new Error('findCookies is not implemented'); -}; - -Store.prototype.putCookie = function(cookie, cb) { - throw new Error('putCookie is not implemented'); -}; - -Store.prototype.updateCookie = function(oldCookie, newCookie, cb) { - // recommended default implementation: - // return this.putCookie(newCookie, cb); - throw new Error('updateCookie is not implemented'); -}; - -Store.prototype.removeCookie = function(domain, path, key, cb) { - throw new Error('removeCookie is not implemented'); -}; - -Store.prototype.removeCookies = function(domain, path, cb) { - throw new Error('removeCookies is not implemented'); -}; - -Store.prototype.removeAllCookies = function(cb) { - throw new Error('removeAllCookies is not implemented'); -} - -Store.prototype.getAllCookies = function(cb) { - throw new Error('getAllCookies is not implemented (therefore jar cannot be serialized)'); -}; - - -/***/ }), - -/***/ 88263: -/***/ ((module) => { - -// generated by genversion -module.exports = '2.5.0' - - -/***/ }), - -/***/ 64305: -/***/ ((module) => { - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -var byteToHex = []; -for (var i = 0; i < 256; ++i) { - byteToHex[i] = (i + 0x100).toString(16).substr(1); -} - -function bytesToUuid(buf, offset) { - var i = offset || 0; - var bth = byteToHex; - // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 - return ([ - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]] - ]).join(''); -} - -module.exports = bytesToUuid; - - -/***/ }), - -/***/ 7968: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Unique ID creation requires a high quality random # generator. In node.js -// this is pretty straight-forward - we use the crypto API. - -var crypto = __nccwpck_require__(76417); - -module.exports = function nodeRNG() { - return crypto.randomBytes(16); -}; - - -/***/ }), - -/***/ 20621: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var rng = __nccwpck_require__(7968); -var bytesToUuid = __nccwpck_require__(64305); - -function v4(options, buf, offset) { - var i = buf && offset || 0; - - if (typeof(options) == 'string') { - buf = options === 'binary' ? 
new Array(16) : null; - options = null; - } - options = options || {}; - - var rnds = options.random || (options.rng || rng)(); - - // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - rnds[6] = (rnds[6] & 0x0f) | 0x40; - rnds[8] = (rnds[8] & 0x3f) | 0x80; - - // Copy bytes to buffer, if provided - if (buf) { - for (var ii = 0; ii < 16; ++ii) { - buf[i + ii] = rnds[ii]; - } - } - - return buf || bytesToUuid(rnds); -} - -module.exports = v4; - - -/***/ }), - -/***/ 2265: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 14502: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -8501,180 +8417,86 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -const fs_1 = __nccwpck_require__(35747); -const crypto_1 = __nccwpck_require__(76417); -const applicationTokenCredentialsBase_1 = __nccwpck_require__(60968); -const authConstants_1 = __nccwpck_require__(26700); -class ApplicationTokenCertificateCredentials extends applicationTokenCredentialsBase_1.ApplicationTokenCredentialsBase { +const tokenCredentialsBase_1 = __nccwpck_require__(5558); +class UserTokenCredentials extends tokenCredentialsBase_1.TokenCredentialsBase { /** - * Creates a new ApplicationTokenCredentials object. - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for detailed instructions on creating an Azure Active Directory application. + * Creates a new UserTokenCredentials object. + * * @constructor * @param {string} clientId The active directory application client id. + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. * @param {string} domain The domain or tenant id containing this application. - * @param {string} certificate A PEM encoded certificate private key. - * @param {string} thumbprint A hex encoded thumbprint of the certificate. + * @param {string} username The user name for the Organization Id account. + * @param {string} password The password for the Organization Id account. * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). * @param {Environment} [environment] The azure environment to authenticate with. * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. */ - constructor(clientId, domain, certificate, thumbprint, tokenAudience, environment, tokenCache) { - if (!certificate || typeof certificate.valueOf() !== "string") { - throw new Error("certificate must be a non empty string."); - } - if (!thumbprint || typeof thumbprint.valueOf() !== "string") { - throw new Error("thumbprint must be a non empty string."); - } - super(clientId, domain, tokenAudience, environment, tokenCache); - this.certificate = certificate; - this.thumbprint = thumbprint; - } - /** - * Tries to get the token from cache initially. If that is unsuccessfull then it tries to get the token from ADAL. - * @returns {Promise} A promise that resolves to TokenResponse and rejects with an Error. 
- */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - try { - const tokenResponse = yield this.getTokenFromCache(); - return tokenResponse; - } - catch (error) { - if (error.message.startsWith(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR)) { - return Promise.reject(error); - } - return new Promise((resolve, reject) => { - const resource = this.getActiveDirectoryResourceId(); - this.authContext.acquireTokenWithClientCertificate(resource, this.clientId, this.certificate, this.thumbprint, (error, tokenResponse) => { - if (error) { - return reject(error); - } - if (tokenResponse.error || tokenResponse.errorDescription) { - return reject(tokenResponse); - } - return resolve(tokenResponse); - }); - }); - } - }); - } - /** - * Creates a new instance of ApplicationTokenCertificateCredentials. - * - * @param clientId The active directory application client id also known as the SPN (ServicePrincipal Name). - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {string} certificateStringOrFilePath A PEM encoded certificate and private key OR an absolute filepath to the .pem file containing that information. For example: - * - CertificateString: "-----BEGIN PRIVATE KEY-----\n\n-----END PRIVATE KEY-----\n-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----\n" - * - CertificateFilePath: **Absolute** file path of the .pem file. - * @param domain The domain or tenant id containing this application. - * @param options AzureTokenCredentialsOptions - Object representing optional parameters. - * - * @returns ApplicationTokenCertificateCredentials - */ - static create(clientId, certificateStringOrFilePath, domain, options) { - if (!certificateStringOrFilePath || - typeof certificateStringOrFilePath.valueOf() !== "string") { - throw new Error("'certificateStringOrFilePath' must be a non empty string."); + constructor(clientId, domain, username, password, tokenAudience, environment, tokenCache) { + if (!clientId || typeof clientId.valueOf() !== "string") { + throw new Error("clientId must be a non empty string."); } - if (!certificateStringOrFilePath.startsWith("-----BEGIN")) { - certificateStringOrFilePath = fs_1.readFileSync(certificateStringOrFilePath, "utf8"); + if (!domain || typeof domain.valueOf() !== "string") { + throw new Error("domain must be a non empty string."); } - const certificatePattern = /(-+BEGIN CERTIFICATE-+)(\n\r?|\r\n?)([A-Za-z0-9\+\/\n\r]+\=*)(\n\r?|\r\n?)(-+END CERTIFICATE-+)/; - const matchCert = certificateStringOrFilePath.match(certificatePattern); - const rawCertificate = matchCert ? 
matchCert[3] : ""; - if (!rawCertificate) { - throw new Error("Unable to correctly parse the certificate from the value provided in 'certificateStringOrFilePath' "); + if (!username || typeof username.valueOf() !== "string") { + throw new Error("username must be a non empty string."); } - const thumbprint = crypto_1.createHash("sha1") - .update(Buffer.from(rawCertificate, "base64")) - .digest("hex"); - return new ApplicationTokenCertificateCredentials(clientId, domain, certificateStringOrFilePath, thumbprint, options.tokenAudience, options.environment, options.tokenCache); - } -} -exports.ApplicationTokenCertificateCredentials = ApplicationTokenCertificateCredentials; -//# sourceMappingURL=applicationTokenCertificateCredentials.js.map - -/***/ }), - -/***/ 35216: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const applicationTokenCredentialsBase_1 = __nccwpck_require__(60968); -const authConstants_1 = __nccwpck_require__(26700); -class ApplicationTokenCredentials extends applicationTokenCredentialsBase_1.ApplicationTokenCredentialsBase { - /** - * Creates a new ApplicationTokenCredentials object. - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for detailed instructions on creating an Azure Active Directory application. - * @constructor - * @param {string} clientId The active directory application client id. - * @param {string} domain The domain or tenant id containing this application. - * @param {string} secret The authentication secret for the application. - * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {Environment} [environment] The azure environment to authenticate with. - * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. 
- */ - constructor(clientId, domain, secret, tokenAudience, environment, tokenCache) { - if (!secret || typeof secret.valueOf() !== "string") { - throw new Error("secret must be a non empty string."); + if (!password || typeof password.valueOf() !== "string") { + throw new Error("password must be a non empty string."); } super(clientId, domain, tokenAudience, environment, tokenCache); - this.secret = secret; + this.username = username; + this.password = password; + } + crossCheckUserNameWithToken(username, userIdFromToken) { + // to maintain the casing consistency between "azureprofile.json" and token cache. (RD 1996587) + // use the "userId" here, which should be the same with "username" except the casing. + return (username.toLowerCase() === userIdFromToken.toLowerCase()); } /** - * Tries to get the token from cache initially. If that is unsuccessfull then it tries to get the token from ADAL. - * @returns {Promise} A promise that resolves to TokenResponse and rejects with an Error. + * Tries to get the token from cache initially. If that is unsuccessful then it tries to get the token from ADAL. + * @returns {Promise} + * {object} [tokenResponse] The tokenResponse (tokenType and accessToken are the two important properties). + * @memberof UserTokenCredentials */ getToken() { return __awaiter(this, void 0, void 0, function* () { try { - const tokenResponse = yield this.getTokenFromCache(); - return tokenResponse; + return yield this.getTokenFromCache(this.username); } catch (error) { - if (error.message && - error.message.startsWith(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR)) { - return Promise.reject(error); - } + const self = this; const resource = this.getActiveDirectoryResourceId(); return new Promise((resolve, reject) => { - this.authContext.acquireTokenWithClientCredentials(resource, this.clientId, this.secret, (error, tokenResponse) => { + self.authContext.acquireTokenWithUsernamePassword(resource, self.username, self.password, self.clientId, (error, tokenResponse) => { if (error) { return reject(error); } if (tokenResponse.error || tokenResponse.errorDescription) { return reject(tokenResponse); } - return resolve(tokenResponse); + tokenResponse = tokenResponse; + if (self.crossCheckUserNameWithToken(self.username, tokenResponse.userId)) { + return resolve(tokenResponse); + } + else { + return reject(`The userId "${tokenResponse.userId}" in access token doesn"t match the username "${self.username}" provided during authentication.`); + } }); }); } }); } } -exports.ApplicationTokenCredentials = ApplicationTokenCredentials; -//# sourceMappingURL=applicationTokenCredentials.js.map +exports.UserTokenCredentials = UserTokenCredentials; +//# sourceMappingURL=userTokenCredentials.js.map /***/ }), -/***/ 60968: +/***/ 29325: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -8690,1184 +8512,184 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -const tokenCredentialsBase_1 = __nccwpck_require__(5558); +const adal = __nccwpck_require__(35894); +const msRest = __nccwpck_require__(30812); +const child_process_1 = __nccwpck_require__(63129); +const fs_1 = __nccwpck_require__(35747); +const ms_rest_azure_env_1 = __nccwpck_require__(40787); +const applicationTokenCredentials_1 = __nccwpck_require__(35216); +const applicationTokenCertificateCredentials_1 = __nccwpck_require__(2265); +const deviceTokenCredentials_1 = __nccwpck_require__(80450); 
+const userTokenCredentials_1 = __nccwpck_require__(14502); const authConstants_1 = __nccwpck_require__(26700); -class ApplicationTokenCredentialsBase extends tokenCredentialsBase_1.TokenCredentialsBase { - /** - * Creates a new ApplicationTokenCredentials object. - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for detailed instructions on creating an Azure Active Directory application. - * @constructor - * @param {string} clientId The active directory application client id. - * @param {string} domain The domain or tenant id containing this application. - * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {Environment} [environment] The azure environment to authenticate with. - * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. - */ - constructor(clientId, domain, tokenAudience, environment, tokenCache) { - super(clientId, domain, tokenAudience, environment, tokenCache); - } - getTokenFromCache() { - const _super = Object.create(null, { - getTokenFromCache: { get: () => super.getTokenFromCache } - }); - return __awaiter(this, void 0, void 0, function* () { - const self = this; - // a thin wrapper over the base implementation. try get token from cache, additionaly clean up cache if required. - try { - const tokenResponse = yield _super.getTokenFromCache.call(this, undefined); - return Promise.resolve(tokenResponse); - } - catch (error) { - // Remove the stale token from the tokencache. ADAL gives the same error message "Entry not found in cache." - // for entry not being present in the cache and for accessToken being expired in the cache. We do not want the token cache - // to contain the expired token, we clean it up here. - const status = yield self.removeInvalidItemsFromCache({ - _clientId: self.clientId - }); - if (status.result) { - return Promise.reject(error); - } - const message = status && status.details && status.details.message - ? status.details.message - : status.details; - return Promise.reject(new Error(authConstants_1.AuthConstants.SDK_INTERNAL_ERROR + - " : " + - "critical failure while removing expired token for service principal from token cache. " + - message)); +const subscriptionUtils_1 = __nccwpck_require__(47813); +const msiVmTokenCredentials_1 = __nccwpck_require__(73287); +const msiAppServiceTokenCredentials_1 = __nccwpck_require__(68376); +/** + * @constant {Array} managementPlaneTokenAudiences - Urls for management plane token + * audience across different azure environments. 
+ */ +const managementPlaneTokenAudiences = [ + "https://management.core.windows.net/", + "https://management.core.chinacloudapi.cn/", + "https://management.core.usgovcloudapi.net/", + "https://management.core.cloudapi.de/", + "https://management.azure.com/", + "https://management.core.windows.net", + "https://management.core.chinacloudapi.cn", + "https://management.core.usgovcloudapi.net", + "https://management.core.cloudapi.de", + "https://management.azure.com" +]; +function turnOnLogging() { + const log = adal.Logging; + log.setLoggingOptions({ + level: 3, + log: function (level, message, error) { + level; + console.info(message); + if (error) { + console.error(error); } - }); - } - /** - * Removes invalid items from token cache. This method is different. Here we never reject in case of error. - * Rather we resolve with an object that says the result is false and error information is provided in - * the details property of the resolved object. This is done to do better error handling in the above function - * where removeInvalidItemsFromCache() is called. - * @param {object} query The query to be used for finding the token for service principal from the cache - * @returns {result: boolean, details?: Error} resultObject with more info. - */ - removeInvalidItemsFromCache(query) { - const self = this; - return new Promise(resolve => { - self.tokenCache.find(query, (error, entries) => { - if (error) { - return resolve({ result: false, details: error }); - } - if (entries && entries.length > 0) { - return new Promise(resolve => { - return self.tokenCache.remove(entries, (err) => { - if (err) { - return resolve({ result: false, details: err }); - } - return resolve({ result: true }); - }); - }); - } - else { - return resolve({ result: true }); - } - }); - }); - } + } + }); } -exports.ApplicationTokenCredentialsBase = ApplicationTokenCredentialsBase; -//# sourceMappingURL=applicationTokenCredentialsBase.js.map - -/***/ }), - -/***/ 38909: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); +if (process.env["AZURE_ADAL_LOGGING_ENABLED"]) { + turnOnLogging(); +} +/** + * Provides a UserTokenCredentials object and the list of subscriptions associated with that userId across all the applicable tenants. + * This method is applicable only for organizational ids that are not 2FA enabled otherwise please use interactive login. + * + * @param {string} username The user name for the Organization Id account. + * @param {string} password The password for the Organization Id account. + * @param {object} [options] Object representing optional parameters. + * @param {string} [options.clientId] The active directory application client id. 
+ * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. + * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {string} [options.domain] The domain or tenant id containing this application. Default value "common". + * @param {Environment} [options.environment] The azure environment to authenticate with. + * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. + * + * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. + */ +function withUsernamePasswordWithAuthResponse(username, password, options) { + return __awaiter(this, void 0, void 0, function* () { + if (!options) { + options = {}; + } + if (!options.clientId) { + options.clientId = authConstants_1.AuthConstants.DEFAULT_ADAL_CLIENT_ID; + } + if (!options.domain) { + options.domain = authConstants_1.AuthConstants.AAD_COMMON_TENANT; + } + if (!options.environment) { + options.environment = ms_rest_azure_env_1.Environment.AzureCloud; + } + let creds; + let tenantList = []; + let subscriptionList = []; + try { + creds = new userTokenCredentials_1.UserTokenCredentials(options.clientId, options.domain, username, password, options.tokenAudience, options.environment); + yield creds.getToken(); + // The token cache gets propulated for all the tenants as a part of building the tenantList. + tenantList = yield subscriptionUtils_1.buildTenantList(creds); + subscriptionList = yield _getSubscriptions(creds, tenantList, options.tokenAudience); + } + catch (err) { + return Promise.reject(err); + } + return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const ms_rest_js_1 = __nccwpck_require__(30812); -const login_1 = __nccwpck_require__(29325); +} +exports.withUsernamePasswordWithAuthResponse = withUsernamePasswordWithAuthResponse; /** - * Describes the credentials by retrieving token via Azure CLI. + * Provides an ApplicationTokenCredentials object and the list of subscriptions associated with that servicePrinicpalId/clientId across all the applicable tenants. + * + * @param {string} clientId The active directory application client id also known as the SPN (ServicePrincipal Name). + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. + * @param {string} secret The application secret for the service principal. + * @param {string} domain The domain or tenant id containing this application. + * @param {object} [options] Object representing optional parameters. + * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). 
+ * @param {Environment} [options.environment] The azure environment to authenticate with. + * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. + * + * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. */ -class AzureCliCredentials { - constructor(subscriptionInfo, tokenInfo, - // tslint:disable-next-line: no-inferrable-types - resource = "https://management.azure.com") { - /** - * Azure resource endpoints. - * - Defaults to Azure Resource Manager from environment: AzureCloud. "https://management.azure.com" - * - For Azure KeyVault: "https://vault.azure.net" - * - For Azure Batch: "https://batch.core.windows.net" - * - For Azure Active Directory Graph: "https://graph.windows.net" - * - * To get the resource for other clouds: - * - `az cloud list` - */ - // tslint:disable-next-line: no-inferrable-types - this.resource = "https://management.azure.com"; - /** - * The number of seconds within which it is good to renew the token. - * A constant set to 270 seconds (4.5 minutes). - */ - this._tokenRenewalMarginInSeconds = 270; - this.subscriptionInfo = subscriptionInfo; - this.tokenInfo = tokenInfo; - this.resource = resource; - } - /** - * Tries to get the new token from Azure CLI, if the token has expired or the subscription has - * changed else uses the cached accessToken. - * @return The tokenResponse (tokenType and accessToken are the two important properties). - */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - if (this._hasTokenExpired() || this._hasSubscriptionChanged() || this._hasResourceChanged()) { - try { - // refresh the access token - this.tokenInfo = yield AzureCliCredentials.getAccessToken({ - subscriptionIdOrName: this.subscriptionInfo.id, - resource: this.resource - }); - } - catch (err) { - throw new Error(`An error occurred while refreshing the new access ` + - `token:${err.stderr ? err.stderr : err.message}`); - } - } - const result = { - accessToken: this.tokenInfo.accessToken, - tokenType: this.tokenInfo.tokenType, - expiresOn: this.tokenInfo.expiresOn, - tenantId: this.tokenInfo.tenant - }; - return result; - }); - } - /** - * Signs a request with the Authentication header. - * @param The request to be signed. 
- */ - signRequest(webResource) { - return __awaiter(this, void 0, void 0, function* () { - const tokenResponse = yield this.getToken(); - webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); - return Promise.resolve(webResource); - }); - } - _hasTokenExpired() { - let result = true; - const now = Math.floor(Date.now() / 1000); - if (this.tokenInfo.expiresOn && - this.tokenInfo.expiresOn instanceof Date && - Math.floor(this.tokenInfo.expiresOn.getTime() / 1000) - now > this._tokenRenewalMarginInSeconds) { - result = false; +function withServicePrincipalSecretWithAuthResponse(clientId, secret, domain, options) { + return __awaiter(this, void 0, void 0, function* () { + if (!options) { + options = {}; } - return result; - } - _hasSubscriptionChanged() { - return this.subscriptionInfo.id !== this.tokenInfo.subscription; - } - _parseToken() { + if (!options.environment) { + options.environment = ms_rest_azure_env_1.Environment.AzureCloud; + } + let creds; + let subscriptionList = []; try { - const base64Url = this.tokenInfo.accessToken.split(".")[1]; - const base64 = decodeURIComponent(Buffer.from(base64Url, "base64").toString("binary").split("").map((c) => { - return "%" + ("00" + c.charCodeAt(0).toString(16)).slice(-2); - }).join("")); - return JSON.parse(base64); + creds = new applicationTokenCredentials_1.ApplicationTokenCredentials(clientId, domain, secret, options.tokenAudience, options.environment); + yield creds.getToken(); + subscriptionList = yield _getSubscriptions(creds, [domain], options.tokenAudience); } catch (err) { - const msg = `An error occurred while parsing the access token: ${err.stack}`; - throw new Error(msg); + return Promise.reject(err); + } + return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); + }); +} +exports.withServicePrincipalSecretWithAuthResponse = withServicePrincipalSecretWithAuthResponse; +/** + * Provides an ApplicationTokenCertificateCredentials object and the list of subscriptions associated with that servicePrinicpalId/clientId across all the applicable tenants. + * + * @param {string} clientId The active directory application client id also known as the SPN (ServicePrincipal Name). + * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} + * for an example. + * @param {string} certificateStringOrFilePath A PEM encoded certificate and private key OR an absolute filepath to the .pem file containing that information. For example: + * - CertificateString: "-----BEGIN PRIVATE KEY-----\n\n-----END PRIVATE KEY-----\n-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----\n" + * - CertificateFilePath: **Absolute** file path of the .pem file. + * @param {string} domain The domain or tenant id containing this application. + * @param {object} [options] Object representing optional parameters. + * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. + * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). + * @param {Environment} [options.environment] The azure environment to authenticate with. + * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. 
+ * + * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. + */ +function withServicePrincipalCertificateWithAuthResponse(clientId, certificateStringOrFilePath, domain, options) { + return __awaiter(this, void 0, void 0, function* () { + if (!options) { + options = {}; + } + if (!options.environment) { + options.environment = ms_rest_azure_env_1.Environment.AzureCloud; + } + let creds; + let subscriptionList = []; + try { + creds = applicationTokenCertificateCredentials_1.ApplicationTokenCertificateCredentials.create(clientId, certificateStringOrFilePath, domain, options); + yield creds.getToken(); + subscriptionList = yield _getSubscriptions(creds, [domain], options.tokenAudience); } + catch (err) { + return Promise.reject(err); + } + return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); + }); +} +exports.withServicePrincipalCertificateWithAuthResponse = withServicePrincipalCertificateWithAuthResponse; +function validateAuthFileContent(credsObj, filePath) { + if (!credsObj) { + throw new Error("Please provide a credsObj to validate."); } - _isAzureResourceManagerEndpoint(newResource, currentResource) { - if (newResource.endsWith("/")) - newResource = newResource.slice(0, -1); - if (currentResource.endsWith("/")) - currentResource = currentResource.slice(0, -1); - return (newResource === "https://management.core.windows.net" && - currentResource === "https://management.azure.com") || - (newResource === "https://management.azure.com" && - currentResource === "https://management.core.windows.net"); - } - _hasResourceChanged() { - const parsedToken = this._parseToken(); - // normalize the resource string, since it is possible to - // provide a resource without a trailing slash - const currentResource = parsedToken.aud && parsedToken.aud.endsWith("/") - ? parsedToken.aud.slice(0, -1) - : parsedToken.aud; - const newResource = this.resource.endsWith("/") - ? this.resource.slice(0, -1) - : this.resource; - const result = this._isAzureResourceManagerEndpoint(newResource, currentResource) - ? false - : currentResource !== newResource; - return result; - } - /** - * Gets the access token for the default or specified subscription. - * @param options Optional parameters that can be provided to get the access token. - */ - static getAccessToken(options = {}) { - return __awaiter(this, void 0, void 0, function* () { - try { - let cmd = "account get-access-token"; - if (options.subscriptionIdOrName) { - cmd += ` -s "${options.subscriptionIdOrName}"`; - } - if (options.resource) { - cmd += ` --resource ${options.resource}`; - } - const result = yield login_1.execAz(cmd); - result.expiresOn = new Date(result.expiresOn); - return result; - } - catch (err) { - const message = `An error occurred while getting credentials from ` + - `Azure CLI: ${err.stack}`; - throw new Error(message); - } - }); - } - /** - * Gets the subscription from Azure CLI. - * @param subscriptionIdOrName - The name or id of the subscription for which the information is - * required. 
- */ - static getSubscription(subscriptionIdOrName) { - return __awaiter(this, void 0, void 0, function* () { - if (subscriptionIdOrName && (typeof subscriptionIdOrName !== "string" || !subscriptionIdOrName.length)) { - throw new Error("'subscriptionIdOrName' must be a non-empty string."); - } - try { - let cmd = "account show"; - if (subscriptionIdOrName) { - cmd += ` -s "${subscriptionIdOrName}"`; - } - const result = yield login_1.execAz(cmd); - return result; - } - catch (err) { - const message = `An error occurred while getting information about the current subscription from ` + - `Azure CLI: ${err.stack}`; - throw new Error(message); - } - }); - } - /** - * Sets the specified subscription as the default subscription for Azure CLI. - * @param subscriptionIdOrName The name or id of the subsciption that needs to be set as the - * default subscription. - */ - static setDefaultSubscription(subscriptionIdOrName) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield login_1.execAz(`account set -s ${subscriptionIdOrName}`); - } - catch (err) { - const message = `An error occurred while setting the current subscription from ` + - `Azure CLI: ${err.stack}`; - throw new Error(message); - } - }); - } - /** - * Returns a list of all the subscriptions from Azure CLI. - * @param options Optional parameters that can be provided while listing all the subcriptions. - */ - static listAllSubscriptions(options = {}) { - return __awaiter(this, void 0, void 0, function* () { - let subscriptionList = []; - try { - let cmd = "account list"; - if (options.all) { - cmd += " --all"; - } - if (options.refresh) { - cmd += "--refresh"; - } - subscriptionList = yield login_1.execAz(cmd); - if (subscriptionList && subscriptionList.length) { - for (const sub of subscriptionList) { - if (sub.cloudName) { - sub.environmentName = sub.cloudName; - delete sub.cloudName; - } - } - } - return subscriptionList; - } - catch (err) { - const message = `An error occurred while getting a list of all the subscription from ` + - `Azure CLI: ${err.stack}`; - throw new Error(message); - } - }); - } - /** - * Provides credentials that can be used by the JS SDK to interact with Azure via azure cli. - * **Pre-requisite** - * - **install azure-cli** . For more information see - * {@link https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest Install Azure CLI} - * - **login via `az login`** - * @param options - Optional parameters that can be provided while creating AzureCliCredentials. - */ - static create(options = {}) { - return __awaiter(this, void 0, void 0, function* () { - const [subscriptinInfo, accessToken] = yield Promise.all([ - AzureCliCredentials.getSubscription(options.subscriptionIdOrName), - AzureCliCredentials.getAccessToken(options) - ]); - return new AzureCliCredentials(subscriptinInfo, accessToken, options.resource); - }); - } -} -exports.AzureCliCredentials = AzureCliCredentials; -//# sourceMappingURL=azureCliCredentials.js.map - -/***/ }), - -/***/ 80450: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. 
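// Editor's illustrative sketch -- not part of the bundled @azure/ms-rest-nodeauth source.
// It shows how the AzureCliCredentials class defined above is typically consumed, and
// assumes the Azure CLI is installed and `az login` has already been run (as noted in
// the create() docs above).
const { AzureCliCredentials } = require('@azure/ms-rest-nodeauth');
async function exampleCliToken() {
  // create() resolves the default subscription via `az account show` and fetches a
  // token via `az account get-access-token`, per the implementation above.
  const creds = await AzureCliCredentials.create({ resource: 'https://management.azure.com' });
  const token = await creds.getToken();
  console.log(token.tokenType, token.expiresOn);
}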
-Object.defineProperty(exports, "__esModule", ({ value: true })); -const tokenCredentialsBase_1 = __nccwpck_require__(5558); -const authConstants_1 = __nccwpck_require__(26700); -class DeviceTokenCredentials extends tokenCredentialsBase_1.TokenCredentialsBase { - /** - * Creates a new DeviceTokenCredentials object that gets a new access token using userCodeInfo (contains user_code, device_code) - * for authenticating user on device. - * - * When this credential is used, the script will provide a url and code. The user needs to copy the url and the code, paste it - * in a browser and authenticate over there. If successful, the script will get the access token. - * - * @constructor - * @param {string} [clientId] The active directory application client id. - * @param {string} [domain] The domain or tenant id containing this application. Default value is "common" - * @param {string} [username] The user name for account in the form: "user@example.com". - * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {Environment} [environment] The azure environment to authenticate with. Default environment is "Azure" popularly known as "Public Azure Cloud". - * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. - */ - constructor(clientId, domain, username, tokenAudience, environment, tokenCache) { - if (!username) { - username = "user@example.com"; - } - if (!domain) { - domain = authConstants_1.AuthConstants.AAD_COMMON_TENANT; - } - if (!clientId) { - clientId = authConstants_1.AuthConstants.DEFAULT_ADAL_CLIENT_ID; - } - super(clientId, domain, tokenAudience, environment, tokenCache); - this.username = username; - } - getToken() { - // For device auth, this is just getTokenFromCache. - return this.getTokenFromCache(this.username); - } -} -exports.DeviceTokenCredentials = DeviceTokenCredentials; -//# sourceMappingURL=deviceTokenCredentials.js.map - -/***/ }), - -/***/ 23699: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -const applicationTokenCredentials_1 = __nccwpck_require__(35216); -const applicationTokenCertificateCredentials_1 = __nccwpck_require__(2265); -const deviceTokenCredentials_1 = __nccwpck_require__(80450); -const msiAppServiceTokenCredentials_1 = __nccwpck_require__(68376); -const msiTokenCredentials_1 = __nccwpck_require__(6182); -const msiVmTokenCredentials_1 = __nccwpck_require__(73287); -const tokenCredentialsBase_1 = __nccwpck_require__(5558); -const userTokenCredentials_1 = __nccwpck_require__(14502); -const adal_node_1 = __nccwpck_require__(35894); -function createAuthenticator(credentials) { - const convertedCredentials = _convert(credentials); - const authenticator = _createAuthenticatorMapper(convertedCredentials); - return authenticator; -} -exports.createAuthenticator = createAuthenticator; -function _convert(credentials) { - if (credentials instanceof msiAppServiceTokenCredentials_1.MSIAppServiceTokenCredentials) { - return new msiAppServiceTokenCredentials_1.MSIAppServiceTokenCredentials({ - msiEndpoint: credentials.msiEndpoint, - msiSecret: credentials.msiSecret, - msiApiVersion: credentials.msiApiVersion, - resource: credentials.resource - }); - } - else if (credentials instanceof msiVmTokenCredentials_1.MSIVmTokenCredentials) { - return new msiVmTokenCredentials_1.MSIVmTokenCredentials({ - resource: credentials.resource, - msiEndpoint: credentials.msiEndpoint - }); - } - else if (credentials instanceof msiTokenCredentials_1.MSITokenCredentials) { - throw new Error("MSI-credentials not one of: MSIVmTokenCredentials, MSIAppServiceTokenCredentials"); - } - else { - return credentials; - } -} -function _createAuthenticatorMapper(credentials) { - return (challenge) => new Promise((resolve, reject) => { - // Function to take token Response and format a authorization value - const _formAuthorizationValue = (err, tokenResponse) => { - if (err) { - return reject(err); - } - if (tokenResponse.error) { - return reject(tokenResponse.error); - } - tokenResponse = tokenResponse; - // Calculate the value to be set in the request's Authorization header and resume the call. - const authorizationValue = tokenResponse.tokenType + " " + tokenResponse.accessToken; - return resolve(authorizationValue); - }; - // Create a new authentication context. 
- if (credentials instanceof tokenCredentialsBase_1.TokenCredentialsBase) { - const context = new adal_node_1.AuthenticationContext(challenge.authorization, true, credentials.authContext && credentials.authContext.cache); - if (credentials instanceof applicationTokenCredentials_1.ApplicationTokenCredentials) { - return context.acquireTokenWithClientCredentials(challenge.resource, credentials.clientId, credentials.secret, _formAuthorizationValue); - } - else if (credentials instanceof applicationTokenCertificateCredentials_1.ApplicationTokenCertificateCredentials) { - return context.acquireTokenWithClientCertificate(challenge.resource, credentials.clientId, credentials.certificate, credentials.thumbprint, _formAuthorizationValue); - } - else if (credentials instanceof userTokenCredentials_1.UserTokenCredentials) { - return context.acquireTokenWithUsernamePassword(challenge.resource, credentials.username, credentials.password, credentials.clientId, _formAuthorizationValue); - } - else if (credentials instanceof deviceTokenCredentials_1.DeviceTokenCredentials) { - return context.acquireToken(challenge.resource, credentials.username, credentials.clientId, _formAuthorizationValue); - } - } - else if (credentials instanceof msiTokenCredentials_1.MSITokenCredentials) { - return credentials.getToken(); - } - else { - return reject(new Error("credentials must be one of: ApplicationTokenCredentials, UserTokenCredentials, " + - "DeviceTokenCredentials, MSITokenCredentials")); - } - }); -} -//# sourceMappingURL=keyVaultFactory.js.map - -/***/ }), - -/***/ 68376: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const msiTokenCredentials_1 = __nccwpck_require__(6182); -const ms_rest_js_1 = __nccwpck_require__(30812); -/** - * @class MSIAppServiceTokenCredentials - */ -class MSIAppServiceTokenCredentials extends msiTokenCredentials_1.MSITokenCredentials { - /** - * Creates an instance of MSIAppServiceTokenCredentials. - * @param {string} [options.msiEndpoint] - The local URL from which your app can request tokens. - * Either provide this parameter or set the environment variable `MSI_ENDPOINT`. - * For example: `MSI_ENDPOINT="http://127.0.0.1:41741/MSI/token/"` - * @param {string} [options.msiSecret] - The secret used in communication between your code and the local MSI agent. - * Either provide this parameter or set the environment variable `MSI_SECRET`. - * For example: `MSI_SECRET="69418689F1E342DD946CB82994CDA3CB"` - * @param {string} [options.resource] - The resource uri or token audience for which the token is needed. - * For e.g. 
it can be: - * - resource management endpoint "https://management.azure.com/" (default) - * - management endpoint "https://management.core.windows.net/" - * @param {string} [options.msiApiVersion] - The api-version of the local MSI agent. Default value is "2017-09-01". - */ - constructor(options) { - if (!options) - options = {}; - super(options); - options.msiEndpoint = options.msiEndpoint || process.env["MSI_ENDPOINT"]; - options.msiSecret = options.msiSecret || process.env["MSI_SECRET"]; - if (!options.msiEndpoint || (options.msiEndpoint && typeof options.msiEndpoint.valueOf() !== "string")) { - throw new Error('Either provide "msiEndpoint" as a property of the "options" object ' + - 'or set the environment variable "MSI_ENDPOINT" and it must be of type "string".'); - } - if (!options.msiSecret || (options.msiSecret && typeof options.msiSecret.valueOf() !== "string")) { - throw new Error('Either provide "msiSecret" as a property of the "options" object ' + - 'or set the environment variable "MSI_SECRET" and it must be of type "string".'); - } - if (!options.msiApiVersion) { - options.msiApiVersion = "2017-09-01"; - } - else if (typeof options.msiApiVersion.valueOf() !== "string") { - throw new Error("msiApiVersion must be a uri."); - } - this.msiEndpoint = options.msiEndpoint; - this.msiSecret = options.msiSecret; - this.msiApiVersion = options.msiApiVersion; - } - /** - * Prepares and sends a GET request to a service endpoint indicated by the app service, which responds with the access token. - * @return {Promise} Promise with the tokenResponse (tokenType and accessToken are the two important properties). - */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - const reqOptions = this.prepareRequestOptions(); - let opRes; - let result; - opRes = yield this._httpClient.sendRequest(reqOptions); - if (opRes.bodyAsText === undefined || opRes.bodyAsText.indexOf("ExceptionMessage") !== -1) { - throw new Error(`MSI: Failed to retrieve a token from "${reqOptions.url}" with an error: ${opRes.bodyAsText}`); - } - result = this.parseTokenResponse(opRes.bodyAsText); - if (!result.tokenType) { - throw new Error(`Invalid token response, did not find tokenType. Response body is: ${opRes.bodyAsText}`); - } - else if (!result.accessToken) { - throw new Error(`Invalid token response, did not find accessToken. Response body is: ${opRes.bodyAsText}`); - } - return result; - }); - } - prepareRequestOptions() { - const endpoint = this.msiEndpoint.endsWith("/") ? this.msiEndpoint : `${this.msiEndpoint}/`; - const resource = encodeURIComponent(this.resource); - const getUrl = `${endpoint}?resource=${resource}&api-version=${this.msiApiVersion}`; - const reqOptions = { - url: getUrl, - headers: { - "secret": this.msiSecret - }, - method: "GET" - }; - const webResource = new ms_rest_js_1.WebResource(); - return webResource.prepare(reqOptions); - } -} -exports.MSIAppServiceTokenCredentials = MSIAppServiceTokenCredentials; -//# sourceMappingURL=msiAppServiceTokenCredentials.js.map - -/***/ }), - -/***/ 6182: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. 
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const ms_rest_js_1 = __nccwpck_require__(30812); -const authConstants_1 = __nccwpck_require__(26700); -/** - * @class MSITokenCredentials - Provides information about managed service identity token credentials. - * This object can only be used to acquire token on a virtual machine provisioned in Azure with managed service identity. - */ -class MSITokenCredentials { - /** - * Creates an instance of MSITokenCredentials. - * @param {object} [options] - Optional parameters - * @param {string} [options.resource] - The resource uri or token audience for which the token is needed. - * For e.g. it can be: - * - resource management endpoint "https://management.azure.com/"(default) - * - management endpoint "https://management.core.windows.net/" - */ - constructor(options) { - if (!options) - options = {}; - if (!options.resource) { - options.resource = authConstants_1.AuthConstants.RESOURCE_MANAGER_ENDPOINT; - } - else if (typeof options.resource.valueOf() !== "string") { - throw new Error("resource must be a uri."); - } - this.resource = options.resource; - this._httpClient = options.httpClient || new ms_rest_js_1.DefaultHttpClient(); - } - /** - * Parses a tokenResponse json string into a object, and converts properties on the first level to camelCase. - * This method tries to standardize the tokenResponse - * @param {string} body A json string - * @return {object} [tokenResponse] The tokenResponse (tokenType and accessToken are the two important properties). - */ - parseTokenResponse(body) { - // Docs show different examples of possible MSI responses for different services. https://docs.microsoft.com/en-us/azure/active-directory/managed-service-identity/overview - // expires_on - is a Date like string in this doc - // - https://docs.microsoft.com/en-us/azure/app-service/app-service-managed-service-identity#rest-protocol-examples - // In other doc it is stringified number. 
- // - https://docs.microsoft.com/en-us/azure/active-directory/managed-service-identity/tutorial-linux-vm-access-arm#get-an-access-token-using-the-vms-identity-and-use-it-to-call-resource-manager - const parsedBody = JSON.parse(body); - parsedBody.accessToken = parsedBody["access_token"]; - delete parsedBody["access_token"]; - parsedBody.tokenType = parsedBody["token_type"]; - delete parsedBody["token_type"]; - if (parsedBody["refresh_token"]) { - parsedBody.refreshToken = parsedBody["refresh_token"]; - delete parsedBody["refresh_token"]; - } - if (parsedBody["expires_in"]) { - parsedBody.expiresIn = parsedBody["expires_in"]; - if (typeof parsedBody["expires_in"] === "string") { - // normal number as a string '1504130527' - parsedBody.expiresIn = parseInt(parsedBody["expires_in"], 10); - } - delete parsedBody["expires_in"]; - } - if (parsedBody["not_before"]) { - parsedBody.notBefore = parsedBody["not_before"]; - if (typeof parsedBody["not_before"] === "string") { - // normal number as a string '1504130527' - parsedBody.notBefore = parseInt(parsedBody["not_before"], 10); - } - delete parsedBody["not_before"]; - } - if (parsedBody["expires_on"]) { - parsedBody.expiresOn = parsedBody["expires_on"]; - if (typeof parsedBody["expires_on"] === "string") { - // possibly a Date string '09/14/2017 00:00:00 PM +00:00' - if (parsedBody["expires_on"].includes(":") || parsedBody["expires_on"].includes("/")) { - parsedBody.expiresOn = new Date(parsedBody["expires_on"], 10); - } - else { - // normal number as a string '1504130527' - parsedBody.expiresOn = new Date(parseInt(parsedBody["expires_on"], 10)); - } - } - delete parsedBody["expires_on"]; - } - return parsedBody; - } - /** - * Signs a request with the Authentication header. - * - * @param {webResource} The WebResource to be signed. - * @return {Promise} Promise with signed WebResource. - */ - signRequest(webResource) { - return __awaiter(this, void 0, void 0, function* () { - const tokenResponse = yield this.getToken(); - webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); - return Promise.resolve(webResource); - }); - } -} -exports.MSITokenCredentials = MSITokenCredentials; -//# sourceMappingURL=msiTokenCredentials.js.map - -/***/ }), - -/***/ 73287: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const msiTokenCredentials_1 = __nccwpck_require__(6182); -const ms_rest_js_1 = __nccwpck_require__(30812); -/** - * @class MSIVmTokenCredentials - */ -class MSIVmTokenCredentials extends msiTokenCredentials_1.MSITokenCredentials { - constructor(options) { - if (!options) - options = {}; - super(options); - if (!options.msiEndpoint) { - options.msiEndpoint = "http://169.254.169.254/metadata/identity/oauth2/token"; - } - else if (typeof options.msiEndpoint !== "string") { - throw new Error("msiEndpoint must be a string."); - } - const urlBuilder = ms_rest_js_1.URLBuilder.parse(options.msiEndpoint); - if (!urlBuilder.getScheme()) { - options.msiEndpoint = `http://${options.msiEndpoint}`; - } - if (!options.apiVersion) { - options.apiVersion = "2018-02-01"; - } - else if (typeof options.apiVersion !== "string") { - throw new Error("apiVersion must be a string."); - } - if (!options.httpMethod) { - options.httpMethod = "GET"; - } - this.apiVersion = options.apiVersion; - this.msiEndpoint = options.msiEndpoint; - this.httpMethod = options.httpMethod; - this.objectId = options.objectId; - this.clientId = options.clientId; - this.identityId = options.identityId; - } - /** - * Prepares and sends a POST request to a service endpoint hosted on the Azure VM, which responds with the access token. - * @return {Promise} Promise with the tokenResponse (tokenType and accessToken are the two important properties). - */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - const reqOptions = this.prepareRequestOptions(); - let opRes; - let result; - opRes = yield this._httpClient.sendRequest(reqOptions); - result = this.parseTokenResponse(opRes.bodyAsText); - if (!result.tokenType) { - throw new Error(`Invalid token response, did not find tokenType. Response body is: ${opRes.bodyAsText}`); - } - else if (!result.accessToken) { - throw new Error(`Invalid token response, did not find accessToken. Response body is: ${opRes.bodyAsText}`); - } - return result; - }); - } - prepareRequestOptions() { - const reqOptions = { - url: this.msiEndpoint, - headers: { - "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", - "Metadata": "true" - }, - method: this.httpMethod, - queryParameters: { - "api-version": this.apiVersion, - "resource": this.resource, - "object_id": this.objectId, - "client_id": this.clientId, - "mi_res_id": this.identityId - } - }; - const webResource = new ms_rest_js_1.WebResource(); - return webResource.prepare(reqOptions); - } -} -exports.MSIVmTokenCredentials = MSIVmTokenCredentials; -//# sourceMappingURL=msiVmTokenCredentials.js.map - -/***/ }), - -/***/ 5558: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const ms_rest_js_1 = __nccwpck_require__(30812); -const ms_rest_azure_env_1 = __nccwpck_require__(40787); -const adal_node_1 = __nccwpck_require__(35894); -class TokenCredentialsBase { - constructor(clientId, domain, tokenAudience, environment = ms_rest_azure_env_1.Environment.AzureCloud, tokenCache = new adal_node_1.MemoryCache()) { - this.clientId = clientId; - this.domain = domain; - this.tokenAudience = tokenAudience; - this.environment = environment; - this.tokenCache = tokenCache; - if (!clientId || typeof clientId.valueOf() !== "string") { - throw new Error("clientId must be a non empty string."); - } - if (!domain || typeof domain.valueOf() !== "string") { - throw new Error("domain must be a non empty string."); - } - if (this.tokenAudience === "graph" && this.domain.toLowerCase() === "common") { - throw new Error(`${"If the tokenAudience is specified as \"graph\" then \"domain\" cannot be defaulted to \"commmon\" tenant.\ - It must be the actual tenant (preferrably a string in a guid format)."}`); - } - const authorityUrl = this.environment.activeDirectoryEndpointUrl + this.domain; - this.authContext = new adal_node_1.AuthenticationContext(authorityUrl, this.environment.validateAuthority, this.tokenCache); - } - getActiveDirectoryResourceId() { - let resource = this.environment.activeDirectoryResourceId; - if (this.tokenAudience) { - resource = this.tokenAudience; - if (this.tokenAudience.toLowerCase() === "graph") { - resource = this.environment.activeDirectoryGraphResourceId; - } - else if (this.tokenAudience.toLowerCase() === "batch") { - resource = this.environment.batchResourceId; - } - } - return resource; - } - getTokenFromCache(username) { - const self = this; - const resource = this.getActiveDirectoryResourceId(); - return new Promise((resolve, reject) => { - self.authContext.acquireToken(resource, username, self.clientId, (error, tokenResponse) => { - if (error) { - return reject(error); - } - if (tokenResponse.error || tokenResponse.errorDescription) { - return reject(tokenResponse); - } - return resolve(tokenResponse); - }); - }); - } - /** - * Signs a request with the Authentication header. - * - * @param {webResource} The WebResource to be signed. - * @param {function(error)} callback The callback function. - * @return {undefined} - */ - signRequest(webResource) { - return __awaiter(this, void 0, void 0, function* () { - const tokenResponse = yield this.getToken(); - webResource.headers.set(ms_rest_js_1.Constants.HeaderConstants.AUTHORIZATION, `${tokenResponse.tokenType} ${tokenResponse.accessToken}`); - return Promise.resolve(webResource); - }); - } -} -exports.TokenCredentialsBase = TokenCredentialsBase; -//# sourceMappingURL=tokenCredentialsBase.js.map - -/***/ }), - -/***/ 14502: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. 
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tokenCredentialsBase_1 = __nccwpck_require__(5558); -class UserTokenCredentials extends tokenCredentialsBase_1.TokenCredentialsBase { - /** - * Creates a new UserTokenCredentials object. - * - * @constructor - * @param {string} clientId The active directory application client id. - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {string} domain The domain or tenant id containing this application. - * @param {string} username The user name for the Organization Id account. - * @param {string} password The password for the Organization Id account. - * @param {string} [tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {Environment} [environment] The azure environment to authenticate with. - * @param {object} [tokenCache] The token cache. Default value is the MemoryCache object from adal. - */ - constructor(clientId, domain, username, password, tokenAudience, environment, tokenCache) { - if (!clientId || typeof clientId.valueOf() !== "string") { - throw new Error("clientId must be a non empty string."); - } - if (!domain || typeof domain.valueOf() !== "string") { - throw new Error("domain must be a non empty string."); - } - if (!username || typeof username.valueOf() !== "string") { - throw new Error("username must be a non empty string."); - } - if (!password || typeof password.valueOf() !== "string") { - throw new Error("password must be a non empty string."); - } - super(clientId, domain, tokenAudience, environment, tokenCache); - this.username = username; - this.password = password; - } - crossCheckUserNameWithToken(username, userIdFromToken) { - // to maintain the casing consistency between "azureprofile.json" and token cache. (RD 1996587) - // use the "userId" here, which should be the same with "username" except the casing. - return (username.toLowerCase() === userIdFromToken.toLowerCase()); - } - /** - * Tries to get the token from cache initially. If that is unsuccessful then it tries to get the token from ADAL. - * @returns {Promise} - * {object} [tokenResponse] The tokenResponse (tokenType and accessToken are the two important properties). 
- * @memberof UserTokenCredentials - */ - getToken() { - return __awaiter(this, void 0, void 0, function* () { - try { - return yield this.getTokenFromCache(this.username); - } - catch (error) { - const self = this; - const resource = this.getActiveDirectoryResourceId(); - return new Promise((resolve, reject) => { - self.authContext.acquireTokenWithUsernamePassword(resource, self.username, self.password, self.clientId, (error, tokenResponse) => { - if (error) { - return reject(error); - } - if (tokenResponse.error || tokenResponse.errorDescription) { - return reject(tokenResponse); - } - tokenResponse = tokenResponse; - if (self.crossCheckUserNameWithToken(self.username, tokenResponse.userId)) { - return resolve(tokenResponse); - } - else { - return reject(`The userId "${tokenResponse.userId}" in access token doesn"t match the username "${self.username}" provided during authentication.`); - } - }); - }); - } - }); - } -} -exports.UserTokenCredentials = UserTokenCredentials; -//# sourceMappingURL=userTokenCredentials.js.map - -/***/ }), - -/***/ 29325: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const adal = __nccwpck_require__(35894); -const msRest = __nccwpck_require__(30812); -const child_process_1 = __nccwpck_require__(63129); -const fs_1 = __nccwpck_require__(35747); -const ms_rest_azure_env_1 = __nccwpck_require__(40787); -const applicationTokenCredentials_1 = __nccwpck_require__(35216); -const applicationTokenCertificateCredentials_1 = __nccwpck_require__(2265); -const deviceTokenCredentials_1 = __nccwpck_require__(80450); -const userTokenCredentials_1 = __nccwpck_require__(14502); -const authConstants_1 = __nccwpck_require__(26700); -const subscriptionUtils_1 = __nccwpck_require__(47813); -const msiVmTokenCredentials_1 = __nccwpck_require__(73287); -const msiAppServiceTokenCredentials_1 = __nccwpck_require__(68376); -/** - * @constant {Array} managementPlaneTokenAudiences - Urls for management plane token - * audience across different azure environments. 
- */ -const managementPlaneTokenAudiences = [ - "https://management.core.windows.net/", - "https://management.core.chinacloudapi.cn/", - "https://management.core.usgovcloudapi.net/", - "https://management.core.cloudapi.de/", - "https://management.azure.com/", - "https://management.core.windows.net", - "https://management.core.chinacloudapi.cn", - "https://management.core.usgovcloudapi.net", - "https://management.core.cloudapi.de", - "https://management.azure.com" -]; -function turnOnLogging() { - const log = adal.Logging; - log.setLoggingOptions({ - level: 3, - log: function (level, message, error) { - level; - console.info(message); - if (error) { - console.error(error); - } - } - }); -} -if (process.env["AZURE_ADAL_LOGGING_ENABLED"]) { - turnOnLogging(); -} -/** - * Provides a UserTokenCredentials object and the list of subscriptions associated with that userId across all the applicable tenants. - * This method is applicable only for organizational ids that are not 2FA enabled otherwise please use interactive login. - * - * @param {string} username The user name for the Organization Id account. - * @param {string} password The password for the Organization Id account. - * @param {object} [options] Object representing optional parameters. - * @param {string} [options.clientId] The active directory application client id. - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {string} [options.domain] The domain or tenant id containing this application. Default value "common". - * @param {Environment} [options.environment] The azure environment to authenticate with. - * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. - * - * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. - */ -function withUsernamePasswordWithAuthResponse(username, password, options) { - return __awaiter(this, void 0, void 0, function* () { - if (!options) { - options = {}; - } - if (!options.clientId) { - options.clientId = authConstants_1.AuthConstants.DEFAULT_ADAL_CLIENT_ID; - } - if (!options.domain) { - options.domain = authConstants_1.AuthConstants.AAD_COMMON_TENANT; - } - if (!options.environment) { - options.environment = ms_rest_azure_env_1.Environment.AzureCloud; - } - let creds; - let tenantList = []; - let subscriptionList = []; - try { - creds = new userTokenCredentials_1.UserTokenCredentials(options.clientId, options.domain, username, password, options.tokenAudience, options.environment); - yield creds.getToken(); - // The token cache gets propulated for all the tenants as a part of building the tenantList. 
- tenantList = yield subscriptionUtils_1.buildTenantList(creds); - subscriptionList = yield _getSubscriptions(creds, tenantList, options.tokenAudience); - } - catch (err) { - return Promise.reject(err); - } - return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); - }); -} -exports.withUsernamePasswordWithAuthResponse = withUsernamePasswordWithAuthResponse; -/** - * Provides an ApplicationTokenCredentials object and the list of subscriptions associated with that servicePrinicpalId/clientId across all the applicable tenants. - * - * @param {string} clientId The active directory application client id also known as the SPN (ServicePrincipal Name). - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {string} secret The application secret for the service principal. - * @param {string} domain The domain or tenant id containing this application. - * @param {object} [options] Object representing optional parameters. - * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {Environment} [options.environment] The azure environment to authenticate with. - * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. - * - * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. - */ -function withServicePrincipalSecretWithAuthResponse(clientId, secret, domain, options) { - return __awaiter(this, void 0, void 0, function* () { - if (!options) { - options = {}; - } - if (!options.environment) { - options.environment = ms_rest_azure_env_1.Environment.AzureCloud; - } - let creds; - let subscriptionList = []; - try { - creds = new applicationTokenCredentials_1.ApplicationTokenCredentials(clientId, domain, secret, options.tokenAudience, options.environment); - yield creds.getToken(); - subscriptionList = yield _getSubscriptions(creds, [domain], options.tokenAudience); - } - catch (err) { - return Promise.reject(err); - } - return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); - }); -} -exports.withServicePrincipalSecretWithAuthResponse = withServicePrincipalSecretWithAuthResponse; -/** - * Provides an ApplicationTokenCertificateCredentials object and the list of subscriptions associated with that servicePrinicpalId/clientId across all the applicable tenants. - * - * @param {string} clientId The active directory application client id also known as the SPN (ServicePrincipal Name). - * See {@link https://azure.microsoft.com/en-us/documentation/articles/active-directory-devquickstarts-dotnet/ Active Directory Quickstart for .Net} - * for an example. - * @param {string} certificateStringOrFilePath A PEM encoded certificate and private key OR an absolute filepath to the .pem file containing that information. For example: - * - CertificateString: "-----BEGIN PRIVATE KEY-----\n\n-----END PRIVATE KEY-----\n-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----\n" - * - CertificateFilePath: **Absolute** file path of the .pem file. 
- * @param {string} domain The domain or tenant id containing this application. - * @param {object} [options] Object representing optional parameters. - * @param {string} [options.tokenAudience] The audience for which the token is requested. Valid values are 'graph', 'batch', or any other resource like 'https://vault.azure.net/'. - * If tokenAudience is 'graph' then domain should also be provided and its value should not be the default 'common' tenant. It must be a string (preferrably in a guid format). - * @param {Environment} [options.environment] The azure environment to authenticate with. - * @param {object} [options.tokenCache] The token cache. Default value is the MemoryCache object from adal. - * - * @returns {Promise} A Promise that resolves to AuthResponse that contains "credentials" and optional "subscriptions" array and rejects with an Error. - */ -function withServicePrincipalCertificateWithAuthResponse(clientId, certificateStringOrFilePath, domain, options) { - return __awaiter(this, void 0, void 0, function* () { - if (!options) { - options = {}; - } - if (!options.environment) { - options.environment = ms_rest_azure_env_1.Environment.AzureCloud; - } - let creds; - let subscriptionList = []; - try { - creds = applicationTokenCertificateCredentials_1.ApplicationTokenCertificateCredentials.create(clientId, certificateStringOrFilePath, domain, options); - yield creds.getToken(); - subscriptionList = yield _getSubscriptions(creds, [domain], options.tokenAudience); - } - catch (err) { - return Promise.reject(err); - } - return Promise.resolve({ credentials: creds, subscriptions: subscriptionList }); - }); -} -exports.withServicePrincipalCertificateWithAuthResponse = withServicePrincipalCertificateWithAuthResponse; -function validateAuthFileContent(credsObj, filePath) { - if (!credsObj) { - throw new Error("Please provide a credsObj to validate."); - } - if (!filePath) { - throw new Error("Please provide a filePath."); + if (!filePath) { + throw new Error("Please provide a filePath."); } if (!credsObj.clientId) { throw new Error(`"clientId" is missing from the auth file: ${filePath}.`); @@ -10627,7 +9449,7 @@ function lexSort (a, b) { */ -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); var ac = __nccwpck_require__(69775); var authParams = __nccwpck_require__(74854); @@ -10688,7 +9510,7 @@ module.exports = exports; */ -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); var constants = __nccwpck_require__(10282); var UserCodeResponseFields = constants.UserCodeResponseFields; @@ -11516,7 +10338,7 @@ module.exports = exports; var request = __nccwpck_require__(48699); var url = __nccwpck_require__(78835); -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); var AADConstants = __nccwpck_require__(10282).AADConstants; var Logger = __nccwpck_require__(92288).Logger; @@ -11802,7 +10624,7 @@ module.exports.W = Authority; -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); var crypto = __nccwpck_require__(76417); __nccwpck_require__(1693); // Adds a number of convenience methods to the builtin Date object. 
@@ -12651,8 +11473,8 @@ module.exports = Constants; */ -var _ = __nccwpck_require__(4987); -var uuid = __nccwpck_require__(19744); // want to replace with this in the future: https://gist.github.com/jed/982883 +var _ = __nccwpck_require__(83571); +var uuid = __nccwpck_require__(2155); // want to replace with this in the future: https://gist.github.com/jed/982883 @@ -12896,7 +11718,7 @@ module.exports = exports; -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); /** * Constructs a new in memory token cache. @@ -12996,7 +11818,7 @@ module.exports = MemoryCache; var request = __nccwpck_require__(48699); var url = __nccwpck_require__(78835); var DOMParser = __nccwpck_require__(57286).DOMParser; -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); var Logger = __nccwpck_require__(92288).Logger; var util = __nccwpck_require__(5336); @@ -13350,10 +12172,10 @@ module.exports = Mex; */ -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); __nccwpck_require__(1693); // Adds a number of convenience methods to the builtin Date object. var querystring = __nccwpck_require__(71191); -var uuid = __nccwpck_require__(19744); +var uuid = __nccwpck_require__(2155); var request = __nccwpck_require__(48699); var url = __nccwpck_require__(78835); var async = __nccwpck_require__(57888); @@ -13898,7 +12720,7 @@ var util = __nccwpck_require__(5336); __nccwpck_require__(1693); var jws = __nccwpck_require__(4636); -var uuid = __nccwpck_require__(19744); +var uuid = __nccwpck_require__(2155); /** * JavaScript dates are in milliseconds, but JWT dates are in seconds. @@ -14692,7 +13514,7 @@ module.exports = TokenRequest; var querystring = __nccwpck_require__(71191); var request = __nccwpck_require__(48699); -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); var url = __nccwpck_require__(78835); var constants = __nccwpck_require__(10282); @@ -14965,7 +13787,7 @@ module.exports = UserRealm; */ -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); var adalIdConstants = __nccwpck_require__(10282).AdalIdParameters; var os = __nccwpck_require__(12087); var url = __nccwpck_require__(78835); @@ -15161,7 +13983,7 @@ module.exports.convertRegularToUrlSafeBase64EncodedString = convertRegularToUrlS var request = __nccwpck_require__(48699); -var uuid = __nccwpck_require__(19744); +var uuid = __nccwpck_require__(2155); var Logger = __nccwpck_require__(92288).Logger; var util = __nccwpck_require__(5336); @@ -15703,7 +14525,7 @@ module.exports = WSTrustResponse; */ -var _ = __nccwpck_require__(4987); +var _ = __nccwpck_require__(83571); var select = __nccwpck_require__(74277); var XMLSerializer = __nccwpck_require__(57286).XMLSerializer; @@ -15809,221 +14631,6 @@ var exports = { module.exports = exports; -/***/ }), - -/***/ 19744: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var v1 = __nccwpck_require__(64425); -var v4 = __nccwpck_require__(85258); - -var uuid = v4; -uuid.v1 = v1; -uuid.v4 = v4; - -module.exports = uuid; - - -/***/ }), - -/***/ 54121: -/***/ ((module) => { - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -var byteToHex = []; -for (var i = 0; i < 256; ++i) { - byteToHex[i] = (i + 0x100).toString(16).substr(1); -} - -function bytesToUuid(buf, offset) { - var i = offset || 0; - var bth = byteToHex; - // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 - return 
([ - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], '-', - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]], - bth[buf[i++]], bth[buf[i++]] - ]).join(''); -} - -module.exports = bytesToUuid; - - -/***/ }), - -/***/ 57455: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Unique ID creation requires a high quality random # generator. In node.js -// this is pretty straight-forward - we use the crypto API. - -var crypto = __nccwpck_require__(76417); - -module.exports = function nodeRNG() { - return crypto.randomBytes(16); -}; - - -/***/ }), - -/***/ 64425: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var rng = __nccwpck_require__(57455); -var bytesToUuid = __nccwpck_require__(54121); - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html - -var _nodeId; -var _clockseq; - -// Previous uuid creation time -var _lastMSecs = 0; -var _lastNSecs = 0; - -// See https://github.com/uuidjs/uuid for API details -function v1(options, buf, offset) { - var i = buf && offset || 0; - var b = buf || []; - - options = options || {}; - var node = options.node || _nodeId; - var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; - - // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 - if (node == null || clockseq == null) { - var seedBytes = rng(); - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [ - seedBytes[0] | 0x01, - seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] - ]; - } - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } - - // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); - - // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; - - // Time since last uuid creation (in msecs) - var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; - - // Per 4.2.1.2, Bump clockseq on clock regression - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } - - // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } - - // Per 4.2.1.2 Throw error if too many uuids are requested - if (nsecs >= 10000) { - throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; - - // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - msecs += 12219292800000; - - // `time_low` - var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; - - // `time_mid` - var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; - - // `time_high_and_version` - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - b[i++] = tmh >>> 16 & 0xff; - - // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - b[i++] = clockseq >>> 8 | 0x80; - - // `clock_seq_low` - b[i++] = clockseq & 0xff; - - // `node` - for (var n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf ? buf : bytesToUuid(b); -} - -module.exports = v1; - - -/***/ }), - -/***/ 85258: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var rng = __nccwpck_require__(57455); -var bytesToUuid = __nccwpck_require__(54121); - -function v4(options, buf, offset) { - var i = buf && offset || 0; - - if (typeof(options) == 'string') { - buf = options === 'binary' ? new Array(16) : null; - options = null; - } - options = options || {}; - - var rnds = options.random || (options.rng || rng)(); - - // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - rnds[6] = (rnds[6] & 0x0f) | 0x40; - rnds[8] = (rnds[8] & 0x3f) | 0x80; - - // Copy bytes to buffer, if provided - if (buf) { - for (var ii = 0; ii < 16; ++ii) { - buf[i + ii] = rnds[ii]; - } - } - - return buf || bytesToUuid(rnds); -} - -module.exports = v4; - - /***/ }), /***/ 64941: @@ -21617,6 +20224,18 @@ function validateKeyword(definition, throwError) { } +/***/ }), + +/***/ 65063: +/***/ ((module) => { + +"use strict"; + +module.exports = function () { + return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g; +}; + + /***/ }), /***/ 52068: @@ -22093,241483 +20712,235995 @@ exports.TrackerStream = __nccwpck_require__(31375) /***/ }), -/***/ 10165: +/***/ 92931: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -var EventEmitter = __nccwpck_require__(28614).EventEmitter -var util = __nccwpck_require__(31669) +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. -var trackerId = 0 -var TrackerBase = module.exports = function (name) { - EventEmitter.call(this) - this.id = ++trackerId - this.name = name -} -util.inherits(TrackerBase, EventEmitter) -/***/ }), +/**/ -/***/ 60660: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +var pna = __nccwpck_require__(47810); +/**/ -"use strict"; +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ -var util = __nccwpck_require__(31669) -var TrackerBase = __nccwpck_require__(10165) -var Tracker = __nccwpck_require__(8074) -var TrackerStream = __nccwpck_require__(31375) +module.exports = Duplex; -var TrackerGroup = module.exports = function (name) { - TrackerBase.call(this, name) - this.parentGroup = null - this.trackers = [] - this.completion = {} - this.weight = {} - this.totalWeight = 0 - this.finished = false - this.bubbleChange = bubbleChange(this) -} -util.inherits(TrackerGroup, TrackerBase) +/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ -function bubbleChange (trackerGroup) { - return function (name, completed, tracker) { - trackerGroup.completion[tracker.id] = completed - if (trackerGroup.finished) return - trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) - } -} +var Readable = __nccwpck_require__(70661); +var Writable = __nccwpck_require__(7631); -TrackerGroup.prototype.nameInTree = function () { - var names = [] - var from = this - while (from) { - names.unshift(from.name) - from = from.parentGroup - } - return names.join('/') -} +util.inherits(Duplex, Readable); -TrackerGroup.prototype.addUnit = function (unit, weight) { - if (unit.addUnit) { - var toTest = this - while (toTest) { - if (unit === toTest) { - throw new Error( - 'Attempted to add tracker group ' + - unit.name + ' to tree that already includes it ' + - this.nameInTree(this)) - } - toTest = toTest.parentGroup - } - unit.parentGroup = this +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; } - this.weight[unit.id] = weight || 1 - this.totalWeight += this.weight[unit.id] - this.trackers.push(unit) - this.completion[unit.id] = unit.completed() - unit.on('change', this.bubbleChange) - if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit) - return unit } -TrackerGroup.prototype.completed = function () { - if (this.trackers.length === 0) return 0 - var valPerWeight = 1 / this.totalWeight - 
var completed = 0 - for (var ii = 0; ii < this.trackers.length; ii++) { - var trackerId = this.trackers[ii].id - completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId] - } - return completed -} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); -TrackerGroup.prototype.newGroup = function (name, weight) { - return this.addUnit(new TrackerGroup(name), weight) -} + Readable.call(this, options); + Writable.call(this, options); -TrackerGroup.prototype.newItem = function (name, todo, weight) { - return this.addUnit(new Tracker(name, todo), weight) -} + if (options && options.readable === false) this.readable = false; -TrackerGroup.prototype.newStream = function (name, todo, weight) { - return this.addUnit(new TrackerStream(name, todo), weight) -} + if (options && options.writable === false) this.writable = false; -TrackerGroup.prototype.finish = function () { - this.finished = true - if (!this.trackers.length) this.addUnit(new Tracker(), 1, true) - for (var ii = 0; ii < this.trackers.length; ii++) { - var tracker = this.trackers[ii] - tracker.finish() - tracker.removeListener('change', this.bubbleChange) - } - this.emit('change', this.name, 1, this) -} + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; -var buffer = ' ' -TrackerGroup.prototype.debug = function (depth) { - depth = depth || 0 - var indent = depth ? buffer.substr(0, depth) : '' - var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' - this.trackers.forEach(function (tracker) { - if (tracker instanceof TrackerGroup) { - output += tracker.debug(depth + 1) - } else { - output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' - } - }) - return output + this.once('end', onend); } +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); -/***/ }), - -/***/ 31375: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -var util = __nccwpck_require__(31669) -var stream = __nccwpck_require__(51642) -var delegate = __nccwpck_require__(61318) -var Tracker = __nccwpck_require__(8074) +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; -var TrackerStream = module.exports = function (name, size, options) { - stream.Transform.call(this, options) - this.tracker = new Tracker(name, size) - this.name = name - this.id = this.tracker.id - this.tracker.on('change', delegateChange(this)) + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); } -util.inherits(TrackerStream, stream.Transform) -function delegateChange (trackerStream) { - return function (name, completion, tracker) { - trackerStream.emit('change', name, completion, trackerStream) - } +function onEndNT(self) { + self.end(); } -TrackerStream.prototype._transform = function (data, encoding, cb) { - this.tracker.completeWork(data.length ? 
data.length : 1) - this.push(data) - cb() -} +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } -TrackerStream.prototype._flush = function (cb) { - this.tracker.finish() - cb() -} + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); -delegate(TrackerStream.prototype, 'tracker') - .method('completed') - .method('addWork') - .method('finish') +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + pna.nextTick(cb, err); +}; /***/ }), -/***/ 8074: +/***/ 61894: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -var util = __nccwpck_require__(31669) -var TrackerBase = __nccwpck_require__(10165) - -var Tracker = module.exports = function (name, todo) { - TrackerBase.call(this, name) - this.workDone = 0 - this.workTodo = todo || 0 -} -util.inherits(Tracker, TrackerBase) - -Tracker.prototype.completed = function () { - return this.workTodo === 0 ? 0 : this.workDone / this.workTodo -} - -Tracker.prototype.addWork = function (work) { - this.workTodo += work - this.emit('change', this.name, this.completed(), this) -} - -Tracker.prototype.completeWork = function (work) { - this.workDone += work - if (this.workDone > this.workTodo) this.workDone = this.workTodo - this.emit('change', this.name, this.completed(), this) -} +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. -Tracker.prototype.finish = function () { - this.workTodo = this.workDone = 1 - this.emit('change', this.name, 1, this) -} -/***/ }), +module.exports = PassThrough; -/***/ 99348: -/***/ ((module) => { +var Transform = __nccwpck_require__(53937); -// Copyright 2011 Mark Cavage All rights reserved. 
+/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ +util.inherits(PassThrough, Transform); -module.exports = { +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); - newInvalidAsn1Error: function (msg) { - var e = new Error(); - e.name = 'InvalidAsn1Error'; - e.message = msg || ''; - return e; - } + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); }; - /***/ }), -/***/ 194: +/***/ 70661: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// Copyright 2011 Mark Cavage All rights reserved. +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -var errors = __nccwpck_require__(99348); -var types = __nccwpck_require__(42473); -var Reader = __nccwpck_require__(20290); -var Writer = __nccwpck_require__(43200); +/**/ -// --- Exports +var pna = __nccwpck_require__(47810); +/**/ -module.exports = { +module.exports = Readable; - Reader: Reader, +/**/ +var isArray = __nccwpck_require__(20893); +/**/ - Writer: Writer +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = __nccwpck_require__(28614).EventEmitter; +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; }; +/**/ -for (var t in types) { - if (types.hasOwnProperty(t)) - module.exports[t] = types[t]; +/**/ +var Stream = __nccwpck_require__(69777); +/**/ + +/**/ + +var Buffer = __nccwpck_require__(95237).Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); } -for (var e in errors) { - if (errors.hasOwnProperty(e)) - module.exports[e] = errors[e]; +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } +/**/ -/***/ }), +/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ -/***/ 20290: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/**/ +var debugUtil = __nccwpck_require__(31669); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ -// Copyright 2011 Mark Cavage All rights reserved. 
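// --- Illustrative sketch (editor's addition, not part of the patch hunks above) ---
// PassThrough is the minimal Transform described above: every written chunk is
// emitted unchanged on the readable side. Usage, assuming Node's built-in
// 'stream' module:
const { PassThrough } = require('stream');

const pass = new PassThrough();
pass.on('data', chunk => console.log('got:', chunk.toString())); // got: hello
pass.write('hello');
pass.end();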
+var BufferList = __nccwpck_require__(52743); +var destroyImpl = __nccwpck_require__(69147); +var StringDecoder; -var assert = __nccwpck_require__(42357); -var Buffer = __nccwpck_require__(15118).Buffer; +util.inherits(Readable, Stream); -var ASN1 = __nccwpck_require__(42473); -var errors = __nccwpck_require__(99348); +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); -// --- Globals + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} -var newInvalidAsn1Error = errors.newInvalidAsn1Error; +function ReadableState(options, stream) { + Duplex = Duplex || __nccwpck_require__(92931); + options = options || {}; + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; -// --- API + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; -function Reader(data) { - if (!data || !Buffer.isBuffer(data)) - throw new TypeError('data must be a node Buffer'); + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - this._buf = data; - this._size = data.length; + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; - // These hold the "current" state - this._len = 0; - this._offset = 0; -} + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; -Object.defineProperty(Reader.prototype, 'length', { - enumerable: true, - get: function () { return (this._len); } -}); + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); -Object.defineProperty(Reader.prototype, 'offset', { - enumerable: true, - get: function () { return (this._offset); } -}); + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; -Object.defineProperty(Reader.prototype, 'remain', { - get: function () { return (this._size - this._offset); } -}); + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; -Object.defineProperty(Reader.prototype, 'buffer', { - get: function () { return (this._buf.slice(this._offset)); } -}); + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + // has it been destroyed + this.destroyed = false; -/** - * Reads a single byte and advances offset; you can pass in `true` to make this - * a "peek" operation (i.e., get the byte, but don't advance the offset). - * - * @param {Boolean} peek true means don't move offset. - * @return {Number} the next byte, null if not enough data. - */ -Reader.prototype.readByte = function (peek) { - if (this._size - this._offset < 1) - return null; + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; - var b = this._buf[this._offset] & 0xff; + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; - if (!peek) - this._offset += 1; + // if true, a maybeReadMore has been scheduled + this.readingMore = false; - return b; -}; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = __nccwpck_require__(63824)/* .StringDecoder */ .s; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(92931); -Reader.prototype.peek = function () { - return this.readByte(true); -}; + if (!(this instanceof Readable)) return new Readable(options); + this._readableState = new ReadableState(options, this); -/** - * Reads a (potentially) variable length off the BER buffer. This call is - * not really meant to be called directly, as callers have to manipulate - * the internal buffer afterwards. - * - * As a result of this call, you can call `Reader.length`, until the - * next thing called that does a readLength. - * - * @return {Number} the amount of offset to advance the buffer. 
- * @throws {InvalidAsn1Error} on bad ASN.1 - */ -Reader.prototype.readLength = function (offset) { - if (offset === undefined) - offset = this._offset; + // legacy + this.readable = true; - if (offset >= this._size) - return null; + if (options) { + if (typeof options.read === 'function') this._read = options.read; - var lenB = this._buf[offset++] & 0xff; - if (lenB === null) - return null; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } - if ((lenB & 0x80) === 0x80) { - lenB &= 0x7f; + Stream.call(this); +} - if (lenB === 0) - throw newInvalidAsn1Error('Indefinite length not supported'); +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } - if (lenB > 4) - throw newInvalidAsn1Error('encoding too long'); + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); - if (this._size - offset < lenB) - return null; +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; - this._len = 0; - for (var i = 0; i < lenB; i++) - this._len = (this._len << 8) + (this._buf[offset++] & 0xff); +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } } else { - // Wasn't a variable length - this._len = lenB; + skipChunkCheck = true; } - return offset; + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); }; +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; -/** - * Parses the next sequence in this BER buffer. - * - * To get the length of the sequence, call `Reader.length`. - * - * @return {Number} the sequence's tag. 
- */ -Reader.prototype.readSequence = function (tag) { - var seq = this.peek(); - if (seq === null) - return null; - if (tag !== undefined && tag !== seq) - throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + - ': got 0x' + seq.toString(16)); - - var o = this.readLength(this._offset + 1); // stored in `length` - if (o === null) - return null; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } - this._offset = o; - return seq; -}; + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + return needMoreData(state); +} -Reader.prototype.readInt = function () { - return this._readTag(ASN1.Integer); -}; +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} -Reader.prototype.readBoolean = function () { - return (this._readTag(ASN1.Boolean) === 0 ? false : true); -}; +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} -Reader.prototype.readEnumeration = function () { - return this._readTag(ASN1.Enumeration); +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; }; +// backwards compatibility. 
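// --- Illustrative sketch (editor's addition, not part of the patch hunks above) ---
// readableAddChunk() above returns needMoreData(state): push() keeps answering
// true until the buffered length reaches highWaterMark, which tells a data
// source to stop pushing for a while. Assuming Node's built-in 'stream' module:
const { Readable } = require('stream');

const r = new Readable({ highWaterMark: 4, read() {} }); // 4-byte buffer limit

console.log(r.push(Buffer.from('ab'))); // true  -> 2 bytes buffered, below the mark
console.log(r.push(Buffer.from('cd'))); // false -> 4 bytes buffered, mark reached
console.log(r.read(2).toString());      // 'ab'  -> consuming frees buffer space again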
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = __nccwpck_require__(63824)/* .StringDecoder */ .s; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; -Reader.prototype.readString = function (tag, retbuf) { - if (!tag) - tag = ASN1.OctetString; - - var b = this.peek(); - if (b === null) - return null; +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} - if (b !== tag) - throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + - ': got 0x' + b.toString(16)); +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} - var o = this.readLength(this._offset + 1); // stored in `length` +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; - if (o === null) - return null; + if (n !== 0) state.emittedReadable = false; - if (this.length > this._size - o) + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); return null; + } - this._offset = o; - - if (this.length === 0) - return retbuf ? Buffer.alloc(0) : ''; - - var str = this._buf.slice(this._offset, this._offset + this.length); - this._offset += this.length; - - return retbuf ? str : str.toString('utf8'); -}; - -Reader.prototype.readOID = function (tag) { - if (!tag) - tag = ASN1.OID; + n = howMuchToRead(n, state); - var b = this.readString(tag, true); - if (b === null) + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); return null; - - var values = []; - var value = 0; - - for (var i = 0; i < b.length; i++) { - var byte = b[i] & 0xff; - - value <<= 7; - value += byte & 0x7f; - if ((byte & 0x80) === 0) { - values.push(value); - value = 0; - } } - value = values.shift(); - values.unshift(value % 40); - values.unshift((value / 40) >> 0); - - return values.join('.'); -}; - - -Reader.prototype._readTag = function (tag) { - assert.ok(tag !== undefined); - - var b = this.peek(); + // All the actual chunk generation logic needs to be + // *below* the call to _read. 
The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. - if (b === null) - return null; + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); - if (b !== tag) - throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + - ': got 0x' + b.toString(16)); + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } - var o = this.readLength(this._offset + 1); // stored in `length` - if (o === null) - return null; + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } - if (this.length > 4) - throw newInvalidAsn1Error('Integer too long: ' + this.length); + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; - if (this.length > this._size - o) - return null; - this._offset = o; + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } - var fb = this._buf[this._offset]; - var value = 0; + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; - for (var i = 0; i < this.length; i++) { - value <<= 8; - value |= (this._buf[this._offset++] & 0xff); + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); } - if ((fb & 0x80) === 0x80 && i !== 4) - value -= (1 << (i * 8)); + if (ret !== null) this.emit('data', ret); - return value >> 0; + return ret; }; +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + // emit 'readable' now to make sure it gets picked up. 
+ emitReadable(stream); +} -// --- Exported API - -module.exports = Reader; - - -/***/ }), +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} -/***/ 42473: -/***/ ((module) => { +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} -// Copyright 2011 Mark Cavage All rights reserved. +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} -module.exports = { - EOC: 0, - Boolean: 1, - Integer: 2, - BitString: 3, - OctetString: 4, - Null: 5, - OID: 6, - ObjectDescriptor: 7, - External: 8, - Real: 9, // float - Enumeration: 10, - PDV: 11, - Utf8String: 12, - RelativeOID: 13, - Sequence: 16, - Set: 17, - NumericString: 18, - PrintableString: 19, - T61String: 20, - VideotexString: 21, - IA5String: 22, - UTCTime: 23, - GeneralizedTime: 24, - GraphicString: 25, - VisibleString: 26, - GeneralString: 28, - UniversalString: 29, - CharacterString: 30, - BMPString: 31, - Constructor: 32, - Context: 128 +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); }; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; -/***/ }), - -/***/ 43200: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Copyright 2011 Mark Cavage All rights reserved. + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); -var assert = __nccwpck_require__(42357); -var Buffer = __nccwpck_require__(15118).Buffer; -var ASN1 = __nccwpck_require__(42473); -var errors = __nccwpck_require__(99348); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); -// --- Globals + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } -var newInvalidAsn1Error = errors.newInvalidAsn1Error; + function onend() { + debug('onend'); + dest.end(); + } -var DEFAULT_OPTS = { - size: 1024, - growthFactor: 8 -}; + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); -// --- Helpers + cleanedUp = true; -function merge(from, to) { - assert.ok(from); - assert.equal(typeof (from), 'object'); - assert.ok(to); - assert.equal(typeof (to), 'object'); + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } - var keys = Object.getOwnPropertyNames(from); - keys.forEach(function (key) { - if (to[key]) - return; + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } - var value = Object.getOwnPropertyDescriptor(from, key); - Object.defineProperty(to, key, value); - }); + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } - return to; -} + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + // Both close and finish should trigger unpipe, but only once. 
+ function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } -// --- API + // tell the dest that it's being piped to + dest.emit('pipe', src); -function Writer(options) { - options = merge(DEFAULT_OPTS, options || {}); + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } - this._buf = Buffer.alloc(options.size || 1024); - this._size = this._buf.length; - this._offset = 0; - this._options = options; + return dest; +}; - // A list of offsets in the buffer where we need to insert - // sequence tag/len pairs. - this._seq = []; +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; } -Object.defineProperty(Writer.prototype, 'buffer', { - get: function () { - if (this._seq.length) - throw newInvalidAsn1Error(this._seq.length + ' unended sequence(s)'); - - return (this._buf.slice(0, this._offset)); - } -}); - -Writer.prototype.writeByte = function (b) { - if (typeof (b) !== 'number') - throw new TypeError('argument must be a Number'); - - this._ensure(1); - this._buf[this._offset++] = b; -}; +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; -Writer.prototype.writeInt = function (i, tag) { - if (typeof (i) !== 'number') - throw new TypeError('argument must be a Number'); - if (typeof (tag) !== 'number') - tag = ASN1.Integer; + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; - var sz = 4; + if (!dest) dest = state.pipes; - while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) && - (sz > 1)) { - sz--; - i <<= 8; + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; } - if (sz > 4) - throw newInvalidAsn1Error('BER ints cannot be > 0xffffffff'); + // slow case. multiple pipe destinations. - this._ensure(2 + sz); - this._buf[this._offset++] = tag; - this._buf[this._offset++] = sz; + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; - while (sz-- > 0) { - this._buf[this._offset++] = ((i & 0xff000000) >>> 24); - i <<= 8; + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; } -}; - - -Writer.prototype.writeNull = function () { - this.writeByte(ASN1.Null); - this.writeByte(0x00); -}; + // try to find the right one. 
+ var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; -Writer.prototype.writeEnumeration = function (i, tag) { - if (typeof (i) !== 'number') - throw new TypeError('argument must be a Number'); - if (typeof (tag) !== 'number') - tag = ASN1.Enumeration; + dest.emit('unpipe', this, unpipeInfo); - return this.writeInt(i, tag); + return this; }; +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); -Writer.prototype.writeBoolean = function (b, tag) { - if (typeof (b) !== 'boolean') - throw new TypeError('argument must be a Boolean'); - if (typeof (tag) !== 'number') - tag = ASN1.Boolean; + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } - this._ensure(3); - this._buf[this._offset++] = tag; - this._buf[this._offset++] = 0x01; - this._buf[this._offset++] = b ? 0xff : 0x00; + return res; }; +Readable.prototype.addListener = Readable.prototype.on; +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} -Writer.prototype.writeString = function (s, tag) { - if (typeof (s) !== 'string') - throw new TypeError('argument must be a string (was: ' + typeof (s) + ')'); - if (typeof (tag) !== 'number') - tag = ASN1.OctetString; - - var len = Buffer.byteLength(s); - this.writeByte(tag); - this.writeLength(len); - if (len) { - this._ensure(len); - this._buf.write(s, this._offset); - this._offset += len; +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
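// --- Illustrative sketch (editor's addition, not part of the patch hunks above) ---
// pipe() above pauses the source whenever dest.write() returns false and
// resumes it from the destination's 'drain' event (see ondata/pipeOnDrain).
// A small end-to-end example, assuming Node's built-in 'stream' module:
const { Readable, Writable } = require('stream');

const chunks = ['one', 'two', 'three'];
const source = new Readable({
  read() { this.push(chunks.shift() || null); } // push chunks, then end with null
});

// A deliberately slow destination with a tiny buffer, so write() returns false
// and the pause / 'drain' / resume cycle actually kicks in.
const slow = new Writable({
  highWaterMark: 1,
  write(chunk, enc, cb) {
    setTimeout(() => { console.log('wrote', chunk.toString()); cb(); }, 50);
  }
});

source.pipe(slow).on('finish', () => console.log('done'));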
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); } + return this; }; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} -Writer.prototype.writeBuffer = function (buf, tag) { - if (typeof (tag) !== 'number') - throw new TypeError('tag must be a number'); - if (!Buffer.isBuffer(buf)) - throw new TypeError('argument must be a buffer'); - - this.writeByte(tag); - this.writeLength(buf.length); - this._ensure(buf.length); - buf.copy(this._buf, this._offset, 0, buf.length); - this._offset += buf.length; -}; - +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } -Writer.prototype.writeStringArray = function (strings) { - if ((!strings instanceof Array)) - throw new TypeError('argument must be an Array[String]'); + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} - var self = this; - strings.forEach(function (s) { - self.writeString(s); - }); +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; }; -// This is really to solve DER cases, but whatever for now -Writer.prototype.writeOID = function (s, tag) { - if (typeof (s) !== 'string') - throw new TypeError('argument must be a string'); - if (typeof (tag) !== 'number') - tag = ASN1.OID; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} - if (!/^([0-9]+\.){3,}[0-9]+$/.test(s)) - throw new Error('argument is not a valid OID string'); +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. 
+Readable.prototype.wrap = function (stream) { + var _this = this; - function encodeOctet(bytes, octet) { - if (octet < 128) { - bytes.push(octet); - } else if (octet < 16384) { - bytes.push((octet >>> 7) | 0x80); - bytes.push(octet & 0x7F); - } else if (octet < 2097152) { - bytes.push((octet >>> 14) | 0x80); - bytes.push(((octet >>> 7) | 0x80) & 0xFF); - bytes.push(octet & 0x7F); - } else if (octet < 268435456) { - bytes.push((octet >>> 21) | 0x80); - bytes.push(((octet >>> 14) | 0x80) & 0xFF); - bytes.push(((octet >>> 7) | 0x80) & 0xFF); - bytes.push(octet & 0x7F); - } else { - bytes.push(((octet >>> 28) | 0x80) & 0xFF); - bytes.push(((octet >>> 21) | 0x80) & 0xFF); - bytes.push(((octet >>> 14) | 0x80) & 0xFF); - bytes.push(((octet >>> 7) | 0x80) & 0xFF); - bytes.push(octet & 0x7F); + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); } - } - var tmp = s.split('.'); - var bytes = []; - bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10)); - tmp.slice(2).forEach(function (b) { - encodeOctet(bytes, parseInt(b, 10)); + _this.push(null); }); - var self = this; - this._ensure(2 + bytes.length); - this.writeByte(tag); - this.writeLength(bytes.length); - bytes.forEach(function (b) { - self.writeByte(b); - }); -}; + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; -Writer.prototype.writeLength = function (len) { - if (typeof (len) !== 'number') - throw new TypeError('argument must be a Number'); + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); - this._ensure(4); + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } - if (len <= 0x7f) { - this._buf[this._offset++] = len; - } else if (len <= 0xff) { - this._buf[this._offset++] = 0x81; - this._buf[this._offset++] = len; - } else if (len <= 0xffff) { - this._buf[this._offset++] = 0x82; - this._buf[this._offset++] = len >> 8; - this._buf[this._offset++] = len; - } else if (len <= 0xffffff) { - this._buf[this._offset++] = 0x83; - this._buf[this._offset++] = len >> 16; - this._buf[this._offset++] = len >> 8; - this._buf[this._offset++] = len; - } else { - throw newInvalidAsn1Error('Length too long (> 4 bytes)'); + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); } -}; -Writer.prototype.startSequence = function (tag) { - if (typeof (tag) !== 'number') - tag = ASN1.Sequence | ASN1.Constructor; + // when we try to consume some more bytes, simply unpause the + // underlying stream. 
+ this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; - this.writeByte(tag); - this._seq.push(this._offset); - this._ensure(3); - this._offset += 3; + return this; }; - -Writer.prototype.endSequence = function () { - var seq = this._seq.pop(); - var start = seq + 3; - var len = this._offset - start; - - if (len <= 0x7f) { - this._shift(start, len, -2); - this._buf[seq] = len; - } else if (len <= 0xff) { - this._shift(start, len, -1); - this._buf[seq] = 0x81; - this._buf[seq + 1] = len; - } else if (len <= 0xffff) { - this._buf[seq] = 0x82; - this._buf[seq + 1] = len >> 8; - this._buf[seq + 2] = len; - } else if (len <= 0xffffff) { - this._shift(start, len, 1); - this._buf[seq] = 0x83; - this._buf[seq + 1] = len >> 16; - this._buf[seq + 2] = len >> 8; - this._buf[seq + 3] = len; - } else { - throw newInvalidAsn1Error('Sequence too long'); +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; } -}; +}); +// exposed for testing purposes only. +Readable._fromList = fromList; -Writer.prototype._shift = function (start, len, shift) { - assert.ok(start !== undefined); - assert.ok(len !== undefined); - assert.ok(shift); +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; - this._buf.copy(this._buf, start + shift, start, start + len); - this._offset += shift; -}; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } -Writer.prototype._ensure = function (len) { - assert.ok(len); + return ret; +} - if (this._size - this._offset < len) { - var sz = this._size * this._options.growthFactor; - if (sz - this._offset < len) - sz += len; +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} - var buf = Buffer.alloc(sz); +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? 
str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} - this._buf.copy(buf, 0, 0, this._offset); - this._buf = buf; - this._size = sz; +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; } -}; + list.length -= c; + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); -// --- Exported API + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} -module.exports = Writer; +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} /***/ }), -/***/ 80970: +/***/ 53937: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// Copyright 2011 Mark Cavage All rights reserved. +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -// If you have no idea what ASN.1 or BER is, see this: -// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc +// a transform stream is a readable/writable stream where you do +// something with the data. 
Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. -var Ber = __nccwpck_require__(194); +module.exports = Transform; -// --- Exported API +var Duplex = __nccwpck_require__(92931); -module.exports = { +/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ - Ber: Ber, +util.inherits(Transform, Duplex); - BerReader: Ber.Reader, +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; - BerWriter: Ber.Writer + var cb = ts.writecb; -}; + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + ts.writechunk = null; + ts.writecb = null; -/***/ }), + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); -/***/ 66631: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + cb(er); -// Copyright (c) 2012, Mark Cavage. All rights reserved. -// Copyright 2015 Joyent, Inc. 
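// --- Illustrative sketch (editor's addition, not part of the patch hunks above) ---
// A minimal _transform() along the lines described above: transform each
// written chunk and hand it to the readable side via cb(err, data). Assuming
// Node's built-in 'stream' module:
const { Transform } = require('stream');

const upper = new Transform({
  transform(chunk, encoding, cb) {
    cb(null, chunk.toString().toUpperCase()); // equivalent to push(data) + cb(err)
  }
});

upper.on('data', chunk => console.log(chunk.toString())); // HELLO
upper.end('hello');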
+ var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} -var assert = __nccwpck_require__(42357); -var Stream = __nccwpck_require__(92413).Stream; -var util = __nccwpck_require__(31669); +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); -///--- Globals + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; -/* JSSTYLED */ -var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; -///--- Internal + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; -function _capitalize(str) { - return (str.charAt(0).toUpperCase() + str.slice(1)); -} + if (typeof options.flush === 'function') this._flush = options.flush; + } -function _toss(name, expected, oper, arg, actual) { - throw new assert.AssertionError({ - message: util.format('%s (%s) is required', name, expected), - actual: (actual === undefined) ? typeof (arg) : actual(arg), - expected: expected, - operator: oper || '===', - stackStartFunction: _toss.caller - }); + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); } -function _getClass(arg) { - return (Object.prototype.toString.call(arg).slice(8, -1)); -} +function prefinish() { + var _this = this; -function noop() { - // Why even bother with asserts? + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } } +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; -///--- Exports +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; -var types = { - bool: { - check: function (arg) { return typeof (arg) === 'boolean'; } - }, - func: { - check: function (arg) { return typeof (arg) === 'function'; } - }, - string: { - check: function (arg) { return typeof (arg) === 'string'; } - }, - object: { - check: function (arg) { - return typeof (arg) === 'object' && arg !== null; - } - }, - number: { - check: function (arg) { - return typeof (arg) === 'number' && !isNaN(arg); - } - }, - finite: { - check: function (arg) { - return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); - } - }, - buffer: { - check: function (arg) { return Buffer.isBuffer(arg); }, - operator: 'Buffer.isBuffer' - }, - array: { - check: function (arg) { return Array.isArray(arg); }, - operator: 'Array.isArray' - }, - stream: { - check: function (arg) { return arg instanceof Stream; }, - operator: 'instanceof', - actual: _getClass - }, - date: { - check: function (arg) { return arg instanceof Date; }, - operator: 'instanceof', - actual: _getClass - }, - regexp: { - check: function (arg) { return arg instanceof RegExp; }, - operator: 'instanceof', - actual: _getClass - }, - uuid: { - check: function (arg) { - return typeof (arg) === 'string' && UUID_REGEXP.test(arg); - }, - operator: 'isUUID' - } +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } }; -function _setExports(ndebug) { - var keys = Object.keys(types); - var out; +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; - /* re-export standard assert */ - if (process.env.NODE_NDEBUG) { - out = noop; - } else { - out = function (arg, msg) { - if (!arg) { - _toss(msg, 'true', arg); - } - }; - } + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. 
+ ts.needTransform = true; + } +}; - /* standard checks */ - keys.forEach(function (k) { - if (ndebug) { - out[k] = noop; - return; - } - var type = types[k]; - out[k] = function (arg, msg) { - if (!type.check(arg)) { - _toss(msg, k, type.operator, arg, type.actual); - } - }; - }); +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; - /* optional checks */ - keys.forEach(function (k) { - var name = 'optional' + _capitalize(k); - if (ndebug) { - out[name] = noop; - return; - } - var type = types[k]; - out[name] = function (arg, msg) { - if (arg === undefined || arg === null) { - return; - } - if (!type.check(arg)) { - _toss(msg, k, type.operator, arg, type.actual); - } - }; - }); + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; - /* arrayOf checks */ - keys.forEach(function (k) { - var name = 'arrayOf' + _capitalize(k); - if (ndebug) { - out[name] = noop; - return; - } - var type = types[k]; - var expected = '[' + k + ']'; - out[name] = function (arg, msg) { - if (!Array.isArray(arg)) { - _toss(msg, expected, type.operator, arg, type.actual); - } - var i; - for (i = 0; i < arg.length; i++) { - if (!type.check(arg[i])) { - _toss(msg, expected, type.operator, arg, type.actual); - } - } - }; - }); +function done(stream, er, data) { + if (er) return stream.emit('error', er); - /* optionalArrayOf checks */ - keys.forEach(function (k) { - var name = 'optionalArrayOf' + _capitalize(k); - if (ndebug) { - out[name] = noop; - return; - } - var type = types[k]; - var expected = '[' + k + ']'; - out[name] = function (arg, msg) { - if (arg === undefined || arg === null) { - return; - } - if (!Array.isArray(arg)) { - _toss(msg, expected, type.operator, arg, type.actual); - } - var i; - for (i = 0; i < arg.length; i++) { - if (!type.check(arg[i])) { - _toss(msg, expected, type.operator, arg, type.actual); - } - } - }; - }); + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); - /* re-export built-in assertions */ - Object.keys(assert).forEach(function (k) { - if (k === 'AssertionError') { - out[k] = assert[k]; - return; - } - if (ndebug) { - out[k] = noop; - return; - } - out[k] = assert[k]; - }); + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); - /* export ourselves (for unit tests _only_) */ - out._setExports = _setExports; + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); - return out; + return stream.push(null); } -module.exports = _setExports(process.env.NODE_NDEBUG); +/***/ }), +/***/ 7631: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/***/ }), +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -/***/ 57888: -/***/ (function(__unused_webpack_module, exports) { +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. -(function (global, factory) { - true ? factory(exports) : - 0; -}(this, (function (exports) { 'use strict'; - /** - * Creates a continuation function with some arguments already applied. - * - * Useful as a shorthand when combined with other control flow functions. Any - * arguments passed to the returned function are added to the arguments - * originally passed to apply. - * - * @name apply - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {Function} fn - The function you want to eventually apply all - * arguments to. Invokes with (arguments...). - * @param {...*} arguments... - Any number of arguments to automatically apply - * when the continuation is called. - * @returns {Function} the partially-applied function - * @example - * - * // using apply - * async.parallel([ - * async.apply(fs.writeFile, 'testfile1', 'test1'), - * async.apply(fs.writeFile, 'testfile2', 'test2') - * ]); - * - * - * // the same process without using apply - * async.parallel([ - * function(callback) { - * fs.writeFile('testfile1', 'test1', callback); - * }, - * function(callback) { - * fs.writeFile('testfile2', 'test2', callback); - * } - * ]); - * - * // It's possible to pass any number of additional arguments when calling the - * // continuation: - * - * node> var fn = async.apply(sys.puts, 'one'); - * node> fn('two', 'three'); - * one - * two - * three - */ - function apply(fn, ...args) { - return (...callArgs) => fn(...args,...callArgs); - } - function initialParams (fn) { - return function (...args/*, callback*/) { - var callback = args.pop(); - return fn.call(this, args, callback); - }; - } +/**/ - /* istanbul ignore file */ +var pna = __nccwpck_require__(47810); +/**/ - var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; - var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; +module.exports = Writable; - function fallback(fn) { - setTimeout(fn, 0); - } +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} - function wrap(defer) { - return (fn, ...args) => defer(() => fn(...args)); - } +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; - var _defer; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ - if (hasSetImmediate) { - _defer = setImmediate; - } else if (hasNextTick) { - _defer = process.nextTick; - } else { - _defer = fallback; - } +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : pna.nextTick; +/**/ - var setImmediate$1 = wrap(_defer); +/**/ +var Duplex; +/**/ - /** - * Take a sync function and make it async, passing its return value to a - * callback. This is useful for plugging sync functions into a waterfall, - * series, or other async functions. Any arguments passed to the generated - * function will be passed to the wrapped function (except for the final - * callback argument). Errors thrown will be passed to the callback. - * - * If the function passed to `asyncify` returns a Promise, that promises's - * resolved/rejected state will be used to call the callback, rather than simply - * the synchronous return value. - * - * This also means you can asyncify ES2017 `async` functions. - * - * @name asyncify - * @static - * @memberOf module:Utils - * @method - * @alias wrapSync - * @category Util - * @param {Function} func - The synchronous function, or Promise-returning - * function to convert to an {@link AsyncFunction}. - * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be - * invoked with `(args..., callback)`. - * @example - * - * // passing a regular synchronous function - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(JSON.parse), - * function (data, next) { - * // data is the result of parsing the text. - * // If there was a parsing error, it would have been caught. - * } - * ], callback); - * - * // passing a function returning a promise - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(function (contents) { - * return db.model.create(contents); - * }), - * function (model, next) { - * // `model` is the instantiated model object. - * // If there was an error, this function would be skipped. - * } - * ], callback); - * - * // es2017 example, though `asyncify` is not needed if your JS environment - * // supports async functions out of the box - * var q = async.queue(async.asyncify(async function(file) { - * var intermediateStep = await processFile(file); - * return await somePromise(intermediateStep) - * })); - * - * q.push(files); - */ - function asyncify(func) { - if (isAsync(func)) { - return function (...args/*, callback*/) { - const callback = args.pop(); - const promise = func.apply(this, args); - return handlePromise(promise, callback) - } - } +Writable.WritableState = WritableState; - return initialParams(function (args, callback) { - var result; - try { - result = func.apply(this, args); - } catch (e) { - return callback(e); - } - // if result is Promise object - if (result && typeof result.then === 'function') { - return handlePromise(result, callback) - } else { - callback(null, result); - } - }); - } +/**/ +var util = Object.create(__nccwpck_require__(95898)); +util.inherits = __nccwpck_require__(44124); +/**/ - function handlePromise(promise, callback) { - return promise.then(value => { - invokeCallback(callback, null, value); - }, err => { - invokeCallback(callback, err && err.message ? 
err : new Error(err)); - }); - } +/**/ +var internalUtil = { + deprecate: __nccwpck_require__(65278) +}; +/**/ - function invokeCallback(callback, error, value) { - try { - callback(error, value); - } catch (err) { - setImmediate$1(e => { throw e }, err); - } - } +/**/ +var Stream = __nccwpck_require__(69777); +/**/ - function isAsync(fn) { - return fn[Symbol.toStringTag] === 'AsyncFunction'; - } +/**/ - function isAsyncGenerator(fn) { - return fn[Symbol.toStringTag] === 'AsyncGenerator'; - } +var Buffer = __nccwpck_require__(95237).Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} - function isAsyncIterable(obj) { - return typeof obj[Symbol.asyncIterator] === 'function'; - } +/**/ - function wrapAsync(asyncFn) { - if (typeof asyncFn !== 'function') throw new Error('expected a function') - return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; - } +var destroyImpl = __nccwpck_require__(69147); - // conditionally promisify a function. - // only return a promise if a callback is omitted - function awaitify (asyncFn, arity = asyncFn.length) { - if (!arity) throw new Error('arity is undefined') - function awaitable (...args) { - if (typeof args[arity - 1] === 'function') { - return asyncFn.apply(this, args) - } +util.inherits(Writable, Stream); - return new Promise((resolve, reject) => { - args[arity - 1] = (err, ...cbArgs) => { - if (err) return reject(err) - resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); - }; - asyncFn.apply(this, args); - }) - } +function nop() {} - return awaitable - } +function WritableState(options, stream) { + Duplex = Duplex || __nccwpck_require__(92931); - function applyEach (eachfn) { - return function applyEach(fns, ...callArgs) { - const go = awaitify(function (callback) { - var that = this; - return eachfn(fns, (fn, cb) => { - wrapAsync(fn).apply(that, callArgs.concat(cb)); - }, callback); - }); - return go; - }; - } + options = options || {}; - function _asyncMap(eachfn, arr, iteratee, callback) { - arr = arr || []; - var results = []; - var counter = 0; - var _iteratee = wrapAsync(iteratee); + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; - return eachfn(arr, (value, _, iterCb) => { - var index = counter++; - _iteratee(value, (err, v) => { - results[index] = v; - iterCb(err); - }); - }, err => { - callback(err, results); - }); - } + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; - function isArrayLike(value) { - return value && - typeof value.length === 'number' && - value.length >= 0 && - value.length % 1 === 0; - } + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - // A temporary value used to identify if the loop should be broken. 
- // See #1064, #1293 - const breakLoop = {}; + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; - function once(fn) { - function wrapper (...args) { - if (fn === null) return; - var callFn = fn; - fn = null; - callFn.apply(this, args); - } - Object.assign(wrapper, fn); - return wrapper - } + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; - function getIterator (coll) { - return coll[Symbol.iterator] && coll[Symbol.iterator](); - } + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); - function createArrayIterator(coll) { - var i = -1; - var len = coll.length; - return function next() { - return ++i < len ? {value: coll[i], key: i} : null; - } - } + // if _final has been called + this.finalCalled = false; - function createES2015Iterator(iterator) { - var i = -1; - return function next() { - var item = iterator.next(); - if (item.done) - return null; - i++; - return {value: item.value, key: i}; - } - } + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; - function createObjectIterator(obj) { - var okeys = obj ? Object.keys(obj) : []; - var i = -1; - var len = okeys.length; - return function next() { - var key = okeys[++i]; - return i < len ? {value: obj[key], key} : null; - }; - } + // has it been destroyed + this.destroyed = false; - function createIterator(coll) { - if (isArrayLike(coll)) { - return createArrayIterator(coll); - } + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; - var iterator = getIterator(coll); - return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); - } + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; - function onlyOnce(fn) { - return function (...args) { - if (fn === null) throw new Error("Callback was already called."); - var callFn = fn; - fn = null; - callFn.apply(this, args); - }; - } + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; - // for async generators - function asyncEachOfLimit(generator, limit, iteratee, callback) { - let done = false; - let canceled = false; - let awaiting = false; - let running = 0; - let idx = 0; + // a flag to see when we're in the middle of a write. 
+ this.writing = false; - function replenish() { - //console.log('replenish') - if (running >= limit || awaiting || done) return - //console.log('replenish awaiting') - awaiting = true; - generator.next().then(({value, done: iterDone}) => { - //console.log('got value', value) - if (canceled || done) return - awaiting = false; - if (iterDone) { - done = true; - if (running <= 0) { - //console.log('done nextCb') - callback(null); - } - return; - } - running++; - iteratee(value, idx, iterateeCallback); - idx++; - replenish(); - }).catch(handleError); - } + // when true all writes will be buffered until .uncork() call + this.corked = 0; - function iterateeCallback(err, result) { - //console.log('iterateeCallback') - running -= 1; - if (canceled) return - if (err) return handleError(err) + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; - if (err === false) { - done = true; - canceled = true; - return - } + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; - if (result === breakLoop || (done && running <= 0)) { - done = true; - //console.log('done iterCb') - return callback(null); - } - replenish(); - } + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; - function handleError(err) { - if (canceled) return - awaiting = false; - done = true; - callback(err); - } + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; - replenish(); - } + // the amount that is being written when _write is called. 
+ this.writelen = 0; - var eachOfLimit = (limit) => { - return (obj, iteratee, callback) => { - callback = once(callback); - if (limit <= 0) { - throw new RangeError('concurrency limit cannot be less than 1') - } - if (!obj) { - return callback(null); - } - if (isAsyncGenerator(obj)) { - return asyncEachOfLimit(obj, limit, iteratee, callback) - } - if (isAsyncIterable(obj)) { - return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) - } - var nextElem = createIterator(obj); - var done = false; - var canceled = false; - var running = 0; - var looping = false; + this.bufferedRequest = null; + this.lastBufferedRequest = null; - function iterateeCallback(err, value) { - if (canceled) return - running -= 1; - if (err) { - done = true; - callback(err); - } - else if (err === false) { - done = true; - canceled = true; - } - else if (value === breakLoop || (done && running <= 0)) { - done = true; - return callback(null); - } - else if (!looping) { - replenish(); - } - } + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; - function replenish () { - looping = true; - while (running < limit && !done) { - var elem = nextElem(); - if (elem === null) { - done = true; - if (running <= 0) { - callback(null); - } - return; - } - running += 1; - iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); - } - looping = false; - } + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; - replenish(); - }; - }; + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; - /** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a - * time. - * - * @name eachOfLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. The `key` is the item's key, or index in the case of an - * array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
- * @returns {Promise} a promise, if a callback is omitted - */ - function eachOfLimit$1(coll, limit, iteratee, callback) { - return eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); - } + // count buffered requests + this.bufferedRequestCount = 0; - var eachOfLimit$2 = awaitify(eachOfLimit$1, 4); + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} - // eachOf implementation optimized for array-likes - function eachOfArrayLike(coll, iteratee, callback) { - callback = once(callback); - var index = 0, - completed = 0, - {length} = coll, - canceled = false; - if (length === 0) { - callback(null); - } +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; - function iteratorCallback(err, value) { - if (err === false) { - canceled = true; - } - if (canceled === true) return - if (err) { - callback(err); - } else if ((++completed === length) || value === breakLoop) { - callback(null); - } - } +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); - for (; index < length; index++) { - iteratee(coll[index], index, onlyOnce(iteratorCallback)); - } - } +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; - // a generic version of eachOf which can handle array, object, and iterator cases. - function eachOfGeneric (coll, iteratee, callback) { - return eachOfLimit$2(coll, Infinity, iteratee, callback); + return object && object._writableState instanceof WritableState; } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} - /** - * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument - * to the iteratee. - * - * @name eachOf - * @static - * @memberOf module:Collections - * @method - * @alias forEachOf - * @category Collection - * @see [async.each]{@link module:Collections.each} - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each - * item in `coll`. - * The `key` is the item's key, or index in the case of an array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
- * @returns {Promise} a promise, if a callback is omitted - * @example - * - * var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; - * var configs = {}; - * - * async.forEachOf(obj, function (value, key, callback) { - * fs.readFile(__dirname + value, "utf8", function (err, data) { - * if (err) return callback(err); - * try { - * configs[key] = JSON.parse(data); - * } catch (e) { - * return callback(e); - * } - * callback(); - * }); - * }, function (err) { - * if (err) console.error(err.message); - * // configs is now a map of JSON data - * doSomethingWith(configs); - * }); - */ - function eachOf(coll, iteratee, callback) { - var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; - return eachOfImplementation(coll, wrapAsync(iteratee), callback); - } +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(92931); - var eachOf$1 = awaitify(eachOf, 3); + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. - /** - * Produces a new collection of values by mapping each value in `coll` through - * the `iteratee` function. The `iteratee` is called with an item from `coll` - * and a callback for when it has finished processing. Each of these callback - * takes 2 arguments: an `error`, and the transformed item from `coll`. If - * `iteratee` passes an error to its callback, the main `callback` (for the - * `map` function) is immediately called with the error. - * - * Note, that since this function applies the `iteratee` to each item in - * parallel, there is no guarantee that the `iteratee` functions will complete - * in order. However, the results array will be in the same order as the - * original `coll`. - * - * If `map` is passed an Object, the results will be an Array. The results - * will roughly be in the order of the original Objects' keys (but this can - * vary across JavaScript engines). - * - * @name map - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an Array of the - * transformed items from the `coll`. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - * @example - * - * async.map(['file1','file2','file3'], fs.stat, function(err, results) { - * // results is now an array of stats for each file - * }); - */ - function map (coll, iteratee, callback) { - return _asyncMap(eachOf$1, coll, iteratee, callback) - } - var map$1 = awaitify(map, 3); + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } - /** - * Applies the provided arguments to each function in the array, calling - * `callback` after all functions have completed. 
If you only provide the first - * argument, `fns`, then it will return a function which lets you pass in the - * arguments as if it were a single function call. If more arguments are - * provided, `callback` is required while `args` is still optional. The results - * for each of the applied async functions are passed to the final callback - * as an array. - * - * @name applyEach - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s - * to all call with the same arguments - * @param {...*} [args] - any number of separate arguments to pass to the - * function. - * @param {Function} [callback] - the final argument should be the callback, - * called when all functions have completed processing. - * @returns {AsyncFunction} - Returns a function that takes no args other than - * an optional callback, that is the result of applying the `args` to each - * of the functions. - * @example - * - * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') - * - * appliedFn((err, results) => { - * // results[0] is the results for `enableSearch` - * // results[1] is the results for `updateSchema` - * }); - * - * // partial application example: - * async.each( - * buckets, - * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), - * callback - * ); - */ - var applyEach$1 = applyEach(map$1); + this._writableState = new WritableState(options, this); - /** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. - * - * @name eachOfSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - */ - function eachOfSeries(coll, iteratee, callback) { - return eachOfLimit$2(coll, 1, iteratee, callback) - } - var eachOfSeries$1 = awaitify(eachOfSeries, 3); + // legacy. + this.writable = true; - /** - * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. - * - * @name mapSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). 
- * @returns {Promise} a promise, if no callback is passed - */ - function mapSeries (coll, iteratee, callback) { - return _asyncMap(eachOfSeries$1, coll, iteratee, callback) - } - var mapSeries$1 = awaitify(mapSeries, 3); - - /** - * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. - * - * @name applyEachSeries - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.applyEach]{@link module:ControlFlow.applyEach} - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all - * call with the same arguments - * @param {...*} [args] - any number of separate arguments to pass to the - * function. - * @param {Function} [callback] - the final argument should be the callback, - * called when all functions have completed processing. - * @returns {AsyncFunction} - A function, that when called, is the result of - * appling the `args` to the list of functions. It takes no args, other than - * a callback. - */ - var applyEachSeries = applyEach(mapSeries$1); + if (options) { + if (typeof options.write === 'function') this._write = options.write; - const PROMISE_SYMBOL = Symbol('promiseCallback'); + if (typeof options.writev === 'function') this._writev = options.writev; - function promiseCallback () { - let resolve, reject; - function callback (err, ...args) { - if (err) return reject(err) - resolve(args.length > 1 ? args : args[0]); - } + if (typeof options.destroy === 'function') this._destroy = options.destroy; - callback[PROMISE_SYMBOL] = new Promise((res, rej) => { - resolve = res, - reject = rej; - }); + if (typeof options.final === 'function') this._final = options.final; + } - return callback - } + Stream.call(this); +} - /** - * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on - * their requirements. Each function can optionally depend on other functions - * being completed first, and each function is run as soon as its requirements - * are satisfied. - * - * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence - * will stop. Further tasks will not execute (so any other functions depending - * on it will not run), and the main `callback` is immediately called with the - * error. - * - * {@link AsyncFunction}s also receive an object containing the results of functions which - * have completed so far as the first argument, if they have dependencies. If a - * task function has no dependencies, it will only be passed a callback. - * - * @name auto - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Object} tasks - An object. Each of its properties is either a - * function or an array of requirements, with the {@link AsyncFunction} itself the last item - * in the array. The object's key of a property serves as the name of the task - * defined by that property, i.e. can be used when specifying requirements for - * other tasks. The function receives one or two arguments: - * * a `results` object, containing the results of the previously executed - * functions, only passed if the task has any dependencies, - * * a `callback(err, result)` function, which must be called when finished, - * passing an `error` (which can be `null`) and the result of the function's - * execution. - * @param {number} [concurrency=Infinity] - An optional `integer` for - * determining the maximum number of tasks that can be run in parallel. 
By - * default, as many as possible. - * @param {Function} [callback] - An optional callback which is called when all - * the tasks have been completed. It receives the `err` argument if any `tasks` - * pass an error to their callback. Results are always returned; however, if an - * error occurs, no further `tasks` will be performed, and the results object - * will only contain partial results. Invoked with (err, results). - * @returns {Promise} a promise, if a callback is not passed - * @example - * - * async.auto({ - * // this function will just be passed a callback - * readData: async.apply(fs.readFile, 'data.txt', 'utf-8'), - * showData: ['readData', function(results, cb) { - * // results.readData is the file's contents - * // ... - * }] - * }, callback); - * - * async.auto({ - * get_data: function(callback) { - * console.log('in get_data'); - * // async code to get some data - * callback(null, 'data', 'converted to array'); - * }, - * make_folder: function(callback) { - * console.log('in make_folder'); - * // async code to create a directory to store a file in - * // this is run at the same time as getting the data - * callback(null, 'folder'); - * }, - * write_file: ['get_data', 'make_folder', function(results, callback) { - * console.log('in write_file', JSON.stringify(results)); - * // once there is some data and the directory exists, - * // write the data to a file in the directory - * callback(null, 'filename'); - * }], - * email_link: ['write_file', function(results, callback) { - * console.log('in email_link', JSON.stringify(results)); - * // once the file is written let's email a link to it... - * // results.write_file contains the filename returned by write_file. - * callback(null, {'file':results.write_file, 'email':'user@example.com'}); - * }] - * }, function(err, results) { - * console.log('err = ', err); - * console.log('results = ', results); - * }); - */ - function auto(tasks, concurrency, callback) { - if (typeof concurrency !== 'number') { - // concurrency is optional, shift the args. - callback = concurrency; - concurrency = null; - } - callback = once(callback || promiseCallback()); - var numTasks = Object.keys(tasks).length; - if (!numTasks) { - return callback(null); - } - if (!concurrency) { - concurrency = numTasks; - } +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; - var results = {}; - var runningTasks = 0; - var canceled = false; - var hasError = false; +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} - var listeners = Object.create(null); +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; - var readyTasks = []; + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} - // for cycle detection: - var readyToCheck = []; // tasks that have been identified as reachable - // without the possibility of returning to an ancestor task - var uncheckedDependencies = {}; +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); - Object.keys(tasks).forEach(key => { - var task = tasks[key]; - if (!Array.isArray(task)) { - // no dependencies - enqueueTask(key, [task]); - readyToCheck.push(key); - return; - } + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } - var dependencies = task.slice(0, task.length - 1); - var remainingDependencies = dependencies.length; - if (remainingDependencies === 0) { - enqueueTask(key, task); - readyToCheck.push(key); - return; - } - uncheckedDependencies[key] = remainingDependencies; + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } - dependencies.forEach(dependencyName => { - if (!tasks[dependencyName]) { - throw new Error('async.auto task `' + key + - '` has a non-existent dependency `' + - dependencyName + '` in ' + - dependencies.join(', ')); - } - addListener(dependencyName, () => { - remainingDependencies--; - if (remainingDependencies === 0) { - enqueueTask(key, task); - } - }); - }); - }); + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - checkForDeadlocks(); - processQueue(); + if (typeof cb !== 'function') cb = nop; - function enqueueTask(key, task) { - readyTasks.push(() => runTask(key, task)); - } + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } - function processQueue() { - if (canceled) return - if (readyTasks.length === 0 && runningTasks === 0) { - return callback(null, results); - } - while(readyTasks.length && runningTasks < concurrency) { - var run = readyTasks.shift(); - run(); - } + return ret; +}; - } +Writable.prototype.cork = function () { + var state = this._writableState; - function addListener(taskName, fn) { - var taskListeners = listeners[taskName]; - if (!taskListeners) { - taskListeners = listeners[taskName] = []; - } + state.corked++; +}; - taskListeners.push(fn); - } +Writable.prototype.uncork = function () { + var state = this._writableState; - function taskComplete(taskName) { - var taskListeners = listeners[taskName] || []; - taskListeners.forEach(fn => fn()); - processQueue(); - } + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; - function runTask(key, task) { - if (hasError) return; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; - var taskCallback = onlyOnce((err, ...result) => { - runningTasks--; - if (err === false) { - canceled = true; - return - } - if (result.length < 2) { - [result] = result; - } - if (err) { - var safeResults = {}; - Object.keys(results).forEach(rkey => { - safeResults[rkey] = results[rkey]; - }); - safeResults[key] = result; - hasError = true; - listeners = Object.create(null); - if (canceled) return - callback(err, safeResults); - } else { - results[key] = result; - taskComplete(key); - } - }); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} - runningTasks++; - var taskFn = wrapAsync(task[task.length - 1]); - if (task.length > 1) { - taskFn(results, taskCallback); - } else { - taskFn(taskCallback); - } - } +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); - function checkForDeadlocks() { - // Kahn's algorithm - // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm - // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html - var currentTask; - var counter = 0; - while (readyToCheck.length) { - currentTask = readyToCheck.pop(); - counter++; - getDependents(currentTask).forEach(dependent => { - if (--uncheckedDependencies[dependent] === 0) { - readyToCheck.push(dependent); - } - }); - } +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; - if (counter !== numTasks) { - throw new Error( - 'async.auto cannot execute tasks due to a recursive dependency' - ); - } - } + state.length += len; - function getDependents(taskName) { - var result = []; - Object.keys(tasks).forEach(key => { - const task = tasks[key]; - if (Array.isArray(task) && task.indexOf(taskName) >= 0) { - result.push(key); - } - }); - return result; - } + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; - return callback[PROMISE_SYMBOL] + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } - var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; - var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; - var FN_ARG_SPLIT = /,/; - var FN_ARG = /(=.+)?(\s*)$/; - var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg; + return ret; +} - function parseParams(func) { - const src = func.toString().replace(STRIP_COMMENTS, ''); - let match = src.match(FN_ARGS); - if (!match) { - match = src.match(ARROW_FN_ARGS); - } - if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) - let [, args] = match; - return args - .replace(/\s/g, '') - .split(FN_ARG_SPLIT) - .map((arg) => arg.replace(FN_ARG, '').trim()); - } +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} - /** - * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent - * tasks are specified as parameters to the function, after the usual callback - * parameter, with the parameter names matching the names of the tasks it - * depends on. This can provide even more readable task graphs which can be - * easier to maintain. - * - * If a final callback is specified, the task results are similarly injected, - * specified as named parameters after the initial error parameter. - * - * The autoInject function is purely syntactic sugar and its semantics are - * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. - * - * @name autoInject - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.auto]{@link module:ControlFlow.auto} - * @category Control Flow - * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of - * the form 'func([dependencies...], callback). The object's key of a property - * serves as the name of the task defined by that property, i.e. can be used - * when specifying requirements for other tasks. - * * The `callback` parameter is a `callback(err, result)` which must be called - * when finished, passing an `error` (which can be `null`) and the result of - * the function's execution. The remaining parameters name other tasks on - * which the task is dependent, and the results from those tasks are the - * arguments of those parameters. - * @param {Function} [callback] - An optional callback which is called when all - * the tasks have been completed. It receives the `err` argument if any `tasks` - * pass an error to their callback, and a `results` object with any completed - * task results, similar to `auto`. 
- * @returns {Promise} a promise, if no callback is passed - * @example - * - * // The example from `auto` can be rewritten as follows: - * async.autoInject({ - * get_data: function(callback) { - * // async code to get some data - * callback(null, 'data', 'converted to array'); - * }, - * make_folder: function(callback) { - * // async code to create a directory to store a file in - * // this is run at the same time as getting the data - * callback(null, 'folder'); - * }, - * write_file: function(get_data, make_folder, callback) { - * // once there is some data and the directory exists, - * // write the data to a file in the directory - * callback(null, 'filename'); - * }, - * email_link: function(write_file, callback) { - * // once the file is written let's email a link to it... - * // write_file contains the filename returned by write_file. - * callback(null, {'file':write_file, 'email':'user@example.com'}); - * } - * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); - * - * // If you are using a JS minifier that mangles parameter names, `autoInject` - * // will not work with plain functions, since the parameter names will be - * // collapsed to a single letter identifier. To work around this, you can - * // explicitly specify the names of the parameters your task function needs - * // in an array, similar to Angular.js dependency injection. - * - * // This still has an advantage over plain `auto`, since the results a task - * // depends on are still spread into arguments. - * async.autoInject({ - * //... - * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { - * callback(null, 'filename'); - * }], - * email_link: ['write_file', function(write_file, callback) { - * callback(null, {'file':write_file, 'email':'user@example.com'}); - * }] - * //... - * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); - */ - function autoInject(tasks, callback) { - var newTasks = {}; +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; - Object.keys(tasks).forEach(key => { - var taskFn = tasks[key]; - var params; - var fnIsAsync = isAsync(taskFn); - var hasNoDeps = - (!fnIsAsync && taskFn.length === 1) || - (fnIsAsync && taskFn.length === 0); + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} - if (Array.isArray(taskFn)) { - params = [...taskFn]; - taskFn = params.pop(); +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} - newTasks[key] = params.concat(params.length > 0 ? 
newTask : taskFn); - } else if (hasNoDeps) { - // no dependencies, use the function as-is - newTasks[key] = taskFn; - } else { - params = parseParams(taskFn); - if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { - throw new Error("autoInject task functions require explicit parameters."); - } +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; - // remove callback param - if (!fnIsAsync) params.pop(); + onwriteStateUpdate(state); - newTasks[key] = params.concat(newTask); - } + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); - function newTask(results, taskCb) { - var newArgs = params.map(name => results[name]); - newArgs.push(taskCb); - wrapAsync(taskFn)(...newArgs); - } - }); + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } - return auto(newTasks, callback); + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); } + } +} - // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation - // used for queues. This implementation assumes that the node provided by the user can be modified - // to adjust the next and last properties. We implement only the minimal functionality - // for queue support. - class DLL { - constructor() { - this.head = this.tail = null; - this.length = 0; - } +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} - removeLink(node) { - if (node.prev) node.prev.next = node.next; - else this.head = node.next; - if (node.next) node.next.prev = node.prev; - else this.tail = node.prev; +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} - node.prev = node.next = null; - this.length -= 1; - return node; - } +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; - empty () { - while(this.head) this.shift(); - return this; - } + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; - insertAfter(node, newNode) { - newNode.prev = node; - newNode.next = node.next; - if (node.next) node.next.prev = newNode; - else this.tail = newNode; - node.next = newNode; - this.length += 1; - } + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; - insertBefore(node, newNode) { - newNode.prev = node.prev; - newNode.next = node; - if (node.prev) node.prev.next = newNode; - else this.head = newNode; - node.prev = newNode; - this.length += 1; - } + doWrite(stream, state, true, state.length, buffer, '', holder.finish); - unshift(node) { - if (this.head) this.insertBefore(this.head, node); - else setInitial(this, node); - } + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; - push(node) { - if (this.tail) this.insertAfter(this.tail, node); - else setInitial(this, node); - } + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. 
+ if (state.writing) { + break; + } + } - shift() { - return this.head && this.removeLink(this.head); - } + if (entry === null) state.lastBufferedRequest = null; + } - pop() { - return this.tail && this.removeLink(this.tail); - } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} - toArray() { - return [...this] - } +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; - *[Symbol.iterator] () { - var cur = this.head; - while (cur) { - yield cur.data; - cur = cur.next; - } - } +Writable.prototype._writev = null; - remove (testFn) { - var curr = this.head; - while(curr) { - var {next} = curr; - if (testFn(curr)) { - this.removeLink(curr); - } - curr = next; - } - return this; - } +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. + if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); } + } +} - function setInitial(dll, node) { - dll.length = 1; - dll.head = dll.tail = node; +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); } + } + return need; +} - function queue(worker, concurrency, payload) { - if (concurrency == null) { - concurrency = 1; - } - else if(concurrency === 0) { - throw new RangeError('Concurrency must not be zero'); - } +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} - var _worker = wrapAsync(worker); - var numRunning = 0; - var workersList = []; - const events = { - error: [], - drain: [], - saturated: [], - unsaturated: [], - empty: [] - }; +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} - function on (event, handler) { - events[event].push(handler); - } +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the 
stream + // has not been initialized yet + if (!this._writableState) { + return; + } - function once (event, handler) { - const handleAndRemove = (...args) => { - off(event, handleAndRemove); - handler(...args); - }; - events[event].push(handleAndRemove); - } + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); - function off (event, handler) { - if (!event) return Object.keys(events).forEach(ev => events[ev] = []) - if (!handler) return events[event] = [] - events[event] = events[event].filter(ev => ev !== handler); - } +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; - function trigger (event, ...args) { - events[event].forEach(handler => handler(...args)); - } +/***/ }), - var processingScheduled = false; - function _insert(data, insertAtFront, rejectOnError, callback) { - if (callback != null && typeof callback !== 'function') { - throw new Error('task callback must be a function'); - } - q.started = true; +/***/ 52743: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var res, rej; - function promiseCallback (err, ...args) { - // we don't care about the error, let the global error handler - // deal with it - if (err) return rejectOnError ? rej(err) : res() - if (args.length <= 1) return res(args[0]) - res(args); - } +"use strict"; - var item = { - data, - callback: rejectOnError ? - promiseCallback : - (callback || promiseCallback) - }; - if (insertAtFront) { - q._tasks.unshift(item); - } else { - q._tasks.push(item); - } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - if (!processingScheduled) { - processingScheduled = true; - setImmediate$1(() => { - processingScheduled = false; - q.process(); - }); - } +var Buffer = __nccwpck_require__(95237).Buffer; +var util = __nccwpck_require__(31669); - if (rejectOnError || !callback) { - return new Promise((resolve, reject) => { - res = resolve; - rej = reject; - }) - } - } +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} - function _createCB(tasks) { - return function (err, ...args) { - numRunning -= 1; +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); - for (var i = 0, l = tasks.length; i < l; i++) { - var task = tasks[i]; + this.head = null; + this.tail = null; + this.length = 0; + } - var index = workersList.indexOf(task); - if (index === 0) { - workersList.shift(); - } else if (index > 0) { - workersList.splice(index, 1); - } + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; - task.callback(err, ...args); + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; - if (err != null) { - trigger('error', err, task.data); - } - } + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; - if (numRunning <= (q.concurrency - q.buffer) ) { - trigger('unsaturated'); - } + 
BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; - if (q.idle()) { - trigger('drain'); - } - q.process(); - }; - } + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; - function _maybeDrain(data) { - if (data.length === 0 && q.idle()) { - // call drain immediately if there are no tasks - setImmediate$1(() => trigger('drain')); - return true - } - return false - } + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; - const eventMethod = (name) => (handler) => { - if (!handler) { - return new Promise((resolve, reject) => { - once(name, (err, data) => { - if (err) return reject(err) - resolve(data); - }); - }) - } - off(name); - on(name, handler); + return BufferList; +}(); - }; +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} - var isProcessing = false; - var q = { - _tasks: new DLL(), - *[Symbol.iterator] () { - yield* q._tasks[Symbol.iterator](); - }, - concurrency, - payload, - buffer: concurrency / 4, - started: false, - paused: false, - push (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, false, false, callback)) - } - return _insert(data, false, false, callback); - }, - pushAsync (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, false, true, callback)) - } - return _insert(data, false, true, callback); - }, - kill () { - off(); - q._tasks.empty(); - }, - unshift (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, true, false, callback)) - } - return _insert(data, true, false, callback); - }, - unshiftAsync (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, true, true, callback)) - } - return _insert(data, true, true, callback); - }, - remove (testFn) { - q._tasks.remove(testFn); - }, - process () { - // Avoid trying to start too many processing operations. This can occur - // when callbacks resolve synchronously (#1267). 
- if (isProcessing) { - return; - } - isProcessing = true; - while(!q.paused && numRunning < q.concurrency && q._tasks.length){ - var tasks = [], data = []; - var l = q._tasks.length; - if (q.payload) l = Math.min(l, q.payload); - for (var i = 0; i < l; i++) { - var node = q._tasks.shift(); - tasks.push(node); - workersList.push(node); - data.push(node.data); - } +/***/ }), - numRunning += 1; +/***/ 69147: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (q._tasks.length === 0) { - trigger('empty'); - } +"use strict"; - if (numRunning === q.concurrency) { - trigger('saturated'); - } - var cb = onlyOnce(_createCB(tasks)); - _worker(data, cb); - } - isProcessing = false; - }, - length () { - return q._tasks.length; - }, - running () { - return numRunning; - }, - workersList () { - return workersList; - }, - idle() { - return q._tasks.length + numRunning === 0; - }, - pause () { - q.paused = true; - }, - resume () { - if (q.paused === false) { return; } - q.paused = false; - setImmediate$1(q.process); - } - }; - // define these as fixed properties, so people get useful errors when updating - Object.defineProperties(q, { - saturated: { - writable: false, - value: eventMethod('saturated') - }, - unsaturated: { - writable: false, - value: eventMethod('unsaturated') - }, - empty: { - writable: false, - value: eventMethod('empty') - }, - drain: { - writable: false, - value: eventMethod('drain') - }, - error: { - writable: false, - value: eventMethod('error') - }, - }); - return q; - } +/**/ - /** - * Creates a `cargo` object with the specified payload. Tasks added to the - * cargo will be processed altogether (up to the `payload` limit). If the - * `worker` is in progress, the task is queued until it becomes available. Once - * the `worker` has completed some tasks, each callback of those tasks is - * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) - * for how `cargo` and `queue` work. - * - * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers - * at a time, cargo passes an array of tasks to a single worker, repeating - * when the worker is finished. - * - * @name cargo - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @category Control Flow - * @param {AsyncFunction} worker - An asynchronous function for processing an array - * of queued tasks. Invoked with `(tasks, callback)`. - * @param {number} [payload=Infinity] - An optional `integer` for determining - * how many tasks should be processed per round; if omitted, the default is - * unlimited. - * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can - * attached as certain properties to listen for specific events during the - * lifecycle of the cargo and inner queue. - * @example - * - * // create a cargo object with payload 2 - * var cargo = async.cargo(function(tasks, callback) { - * for (var i=0; i*/ - /** - * Creates a `cargoQueue` object with the specified payload. 
Tasks added to the - * cargoQueue will be processed together (up to the `payload` limit) in `concurrency` parallel workers. - * If the all `workers` are in progress, the task is queued until one becomes available. Once - * a `worker` has completed some tasks, each callback of those tasks is - * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) - * for how `cargo` and `queue` work. - * - * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers - * at a time, and [`cargo`]{@link module:ControlFlow.cargo} passes an array of tasks to a single worker, - * the cargoQueue passes an array of tasks to multiple parallel workers. - * - * @name cargoQueue - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @see [async.cargo]{@link module:ControlFLow.cargo} - * @category Control Flow - * @param {AsyncFunction} worker - An asynchronous function for processing an array - * of queued tasks. Invoked with `(tasks, callback)`. - * @param {number} [concurrency=1] - An `integer` for determining how many - * `worker` functions should be run in parallel. If omitted, the concurrency - * defaults to `1`. If the concurrency is `0`, an error is thrown. - * @param {number} [payload=Infinity] - An optional `integer` for determining - * how many tasks should be processed per round; if omitted, the default is - * unlimited. - * @returns {module:ControlFlow.QueueObject} A cargoQueue object to manage the tasks. Callbacks can - * attached as certain properties to listen for specific events during the - * lifecycle of the cargoQueue and inner queue. - * @example - * - * // create a cargoQueue object with payload 2 and concurrency 2 - * var cargoQueue = async.cargoQueue(function(tasks, callback) { - * for (var i=0; i { - _iteratee(memo, x, (err, v) => { - memo = v; - iterCb(err); - }); - }, err => callback(err, memo)); + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); } - var reduce$1 = awaitify(reduce, 4); + return this; + } - /** - * Version of the compose function that is more natural to read. Each function - * consumes the return value of the previous function. It is the equivalent of - * [compose]{@link module:ControlFlow.compose} with the arguments reversed. - * - * Each function is executed with the `this` binding of the composed function. 
- * - * @name seq - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.compose]{@link module:ControlFlow.compose} - * @category Control Flow - * @param {...AsyncFunction} functions - the asynchronous functions to compose - * @returns {Function} a function that composes the `functions` in order - * @example - * - * // Requires lodash (or underscore), express3 and dresende's orm2. - * // Part of an app, that fetches cats of the logged user. - * // This example uses `seq` function to avoid overnesting and error - * // handling clutter. - * app.get('/cats', function(request, response) { - * var User = request.models.User; - * async.seq( - * _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data)) - * function(user, fn) { - * user.getCats(fn); // 'getCats' has signature (callback(err, data)) - * } - * )(req.session.user_id, function (err, cats) { - * if (err) { - * console.error(err); - * response.json({ status: 'error', message: err.message }); - * } else { - * response.json({ status: 'ok', message: 'Cats found', data: cats }); - * } - * }); - * }); - */ - function seq(...functions) { - var _functions = functions.map(wrapAsync); - return function (...args) { - var that = this; + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks - var cb = args[args.length - 1]; - if (typeof cb == 'function') { - args.pop(); - } else { - cb = promiseCallback(); - } + if (this._readableState) { + this._readableState.destroyed = true; + } - reduce$1(_functions, args, (newargs, fn, iterCb) => { - fn.apply(that, newargs.concat((err, ...nextargs) => { - iterCb(err, nextargs); - })); - }, - (err, results) => cb(err, ...results)); + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } - return cb[PROMISE_SYMBOL] - }; + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); } + }); - /** - * Creates a function which is a composition of the passed asynchronous - * functions. Each function consumes the return value of the function that - * follows. Composing functions `f()`, `g()`, and `h()` would produce the result - * of `f(g(h()))`, only this version uses callbacks to obtain the return values. - * - * If the last argument to the composed function is not a function, a promise - * is returned when you call it. - * - * Each function is executed with the `this` binding of the composed function. - * - * @name compose - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {...AsyncFunction} functions - the asynchronous functions to compose - * @returns {Function} an asynchronous function that is the composed - * asynchronous `functions` - * @example - * - * function add1(n, callback) { - * setTimeout(function () { - * callback(null, n + 1); - * }, 10); - * } - * - * function mul3(n, callback) { - * setTimeout(function () { - * callback(null, n * 3); - * }, 10); - * } - * - * var add1mul3 = async.compose(mul3, add1); - * add1mul3(4, function (err, result) { - * // result now equals 15 - * }); - */ - function compose(...args) { - return seq(...args.reverse()); - } + return this; +} - /** - * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. 
- * - * @name mapLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - */ - function mapLimit (coll, limit, iteratee, callback) { - return _asyncMap(eachOfLimit(limit), coll, iteratee, callback) - } - var mapLimit$1 = awaitify(mapLimit, 4); +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } - /** - * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. - * - * @name concatLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.concat]{@link module:Collections.concat} - * @category Collection - * @alias flatMapLimit - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, - * which should use an array as its result. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is an array - * containing the concatenated results of the `iteratee` function. Invoked with - * (err, results). - * @returns A Promise, if no callback is passed - */ - function concatLimit(coll, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return mapLimit$1(coll, limit, (val, iterCb) => { - _iteratee(val, (err, ...args) => { - if (err) return iterCb(err); - return iterCb(err, args); - }); - }, (err, mapResults) => { - var result = []; - for (var i = 0; i < mapResults.length; i++) { - if (mapResults[i]) { - result = result.concat(...mapResults[i]); - } - } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} - return callback(err, result); - }); - } - var concatLimit$1 = awaitify(concatLimit, 4); +function emitErrorNT(self, err) { + self.emit('error', err); +} - /** - * Applies `iteratee` to each item in `coll`, concatenating the results. Returns - * the concatenated list. The `iteratee`s are called in parallel, and the - * results are concatenated as they return. The results array will be returned in - * the original order of `coll` passed to the `iteratee` function. - * - * @name concat - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @alias flatMap - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
- * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, - * which should use an array as its result. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is an array - * containing the concatenated results of the `iteratee` function. Invoked with - * (err, results). - * @returns A Promise, if no callback is passed - * @example - * - * async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files) { - * // files is now a list of filenames that exist in the 3 directories - * }); - */ - function concat(coll, iteratee, callback) { - return concatLimit$1(coll, Infinity, iteratee, callback) - } - var concat$1 = awaitify(concat, 3); +module.exports = { + destroy: destroy, + undestroy: undestroy +}; - /** - * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. - * - * @name concatSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.concat]{@link module:Collections.concat} - * @category Collection - * @alias flatMapSeries - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. - * The iteratee should complete with an array an array of results. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is an array - * containing the concatenated results of the `iteratee` function. Invoked with - * (err, results). - * @returns A Promise, if no callback is passed - */ - function concatSeries(coll, iteratee, callback) { - return concatLimit$1(coll, 1, iteratee, callback) - } - var concatSeries$1 = awaitify(concatSeries, 3); +/***/ }), - /** - * Returns a function that when called, calls-back with the values provided. - * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to - * [`auto`]{@link module:ControlFlow.auto}. - * - * @name constant - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {...*} arguments... - Any number of arguments to automatically invoke - * callback with. - * @returns {AsyncFunction} Returns a function that when invoked, automatically - * invokes the callback with the previous given arguments. - * @example - * - * async.waterfall([ - * async.constant(42), - * function (value, next) { - * // value === 42 - * }, - * //... - * ], callback); - * - * async.waterfall([ - * async.constant(filename, "utf8"), - * fs.readFile, - * function (fileData, next) { - * //... - * } - * //... - * ], callback); - * - * async.auto({ - * hostname: async.constant("https://server.net/"), - * port: findFreePort, - * launchServer: ["hostname", "port", function (options, cb) { - * startServer(options, cb); - * }], - * //... 
- * }, callback); - */ - function constant(...args) { - return function (...ignoredArgs/*, callback*/) { - var callback = ignoredArgs.pop(); - return callback(null, ...args); - }; - } +/***/ 69777: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - function _createTester(check, getResult) { - return (eachfn, arr, _iteratee, cb) => { - var testPassed = false; - var testResult; - const iteratee = wrapAsync(_iteratee); - eachfn(arr, (value, _, callback) => { - iteratee(value, (err, result) => { - if (err || err === false) return callback(err); +module.exports = __nccwpck_require__(92413); - if (check(result) && !testResult) { - testPassed = true; - testResult = getResult(true, value); - return callback(null, breakLoop); - } - callback(); - }); - }, err => { - if (err) return cb(err); - cb(null, testPassed ? testResult : getResult(false)); - }); - }; - } - /** - * Returns the first value in `coll` that passes an async truth test. The - * `iteratee` is applied in parallel, meaning the first iteratee to return - * `true` will fire the detect `callback` with that result. That means the - * result might not be the first item in the original `coll` (in terms of order) - * that passes the test. +/***/ }), - * If order within the original `coll` is important, then look at - * [`detectSeries`]{@link module:Collections.detectSeries}. - * - * @name detect - * @static - * @memberOf module:Collections - * @method - * @alias find - * @category Collections - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @returns A Promise, if no callback is passed - * @example - * - * async.detect(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // result now equals the first file in the list that exists - * }); - */ - function detect(coll, iteratee, callback) { - return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) - } - var detect$1 = awaitify(detect, 3); +/***/ 64043: +/***/ ((module, exports, __nccwpck_require__) => { - /** - * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a - * time. - * - * @name detectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findLimit - * @category Collections - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. 
- * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @returns a Promise if no callback is passed - */ - function detectLimit(coll, limit, iteratee, callback) { - return _createTester(bool => bool, (res, item) => item)(eachOfLimit(limit), coll, iteratee, callback) - } - var detectLimit$1 = awaitify(detectLimit, 4); +var Stream = __nccwpck_require__(92413); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(70661); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(7631); + exports.Duplex = __nccwpck_require__(92931); + exports.Transform = __nccwpck_require__(53937); + exports.PassThrough = __nccwpck_require__(61894); +} - /** - * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. - * - * @name detectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findSeries - * @category Collections - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @returns a Promise if no callback is passed - */ - function detectSeries(coll, iteratee, callback) { - return _createTester(bool => bool, (res, item) => item)(eachOfLimit(1), coll, iteratee, callback) - } - var detectSeries$1 = awaitify(detectSeries, 3); +/***/ }), - function consoleFunc(name) { - return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { - if (typeof console === 'object') { - if (err) { - if (console.error) { - console.error(err); - } - } else if (console[name]) { - resultArgs.forEach(x => console[name](x)); - } - } - }) - } +/***/ 95237: +/***/ ((module, exports, __nccwpck_require__) => { - /** - * Logs the result of an [`async` function]{@link AsyncFunction} to the - * `console` using `console.dir` to display the properties of the resulting object. - * Only works in Node.js or in browsers that support `console.dir` and - * `console.error` (such as FF and Chrome). - * If multiple arguments are returned from the async function, - * `console.dir` is called on each argument in order. - * - * @name dir - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} function - The function you want to eventually apply - * all arguments to. - * @param {...*} arguments... - Any number of arguments to apply to the function. 
- * @example - * - * // in a module - * var hello = function(name, callback) { - * setTimeout(function() { - * callback(null, {hello: name}); - * }, 1000); - * }; - * - * // in the node repl - * node> async.dir(hello, 'world'); - * {hello: 'world'} - */ - var dir = consoleFunc('dir'); +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(64293) +var Buffer = buffer.Buffer - /** - * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in - * the order of operations, the arguments `test` and `iteratee` are switched. - * - * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. - * - * @name doWhilst - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {AsyncFunction} iteratee - A function which is called each time `test` - * passes. Invoked with (callback). - * @param {AsyncFunction} test - asynchronous truth test to perform after each - * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the - * non-error args from the previous callback of `iteratee`. - * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `iteratee` has stopped. - * `callback` will be passed an error and any arguments passed to the final - * `iteratee`'s callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if no callback is passed - */ - function doWhilst(iteratee, test, callback) { - callback = onlyOnce(callback); - var _fn = wrapAsync(iteratee); - var _test = wrapAsync(test); - var results; +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} - function next(err, ...args) { - if (err) return callback(err); - if (err === false) return; - results = args; - _test(...args, check); - } +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} - function check(err, truth) { - if (err) return callback(err); - if (err === false) return; - if (!truth) return callback(null, ...results); - _fn(next); - } +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) - return check(null, true); +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) } + } else { + buf.fill(0) + } + return buf +} - var doWhilst$1 = awaitify(doWhilst, 3); +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} - /** - * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the - * argument ordering differs from `until`. 
- * - * @name doUntil - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} - * @category Control Flow - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` fails. Invoked with (callback). - * @param {AsyncFunction} test - asynchronous truth test to perform after each - * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the - * non-error args from the previous callback of `iteratee` - * @param {Function} [callback] - A callback which is called after the test - * function has passed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if no callback is passed - */ - function doUntil(iteratee, test, callback) { - const _test = wrapAsync(test); - return doWhilst$1(iteratee, (...args) => { - const cb = args.pop(); - _test(...args, (err, truth) => cb (err, !truth)); - }, callback); - } +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} - function _withoutIndex(iteratee) { - return (value, index, callback) => iteratee(value, callback); - } - /** - * Applies the function `iteratee` to each item in `coll`, in parallel. - * The `iteratee` is called with an item from the list, and a callback for when - * it has finished. If the `iteratee` passes an error to its `callback`, the - * main `callback` (for the `each` function) is immediately called with the - * error. - * - * Note, that since this function applies `iteratee` to each item in parallel, - * there is no guarantee that the iteratee functions will complete in order. - * - * @name each - * @static - * @memberOf module:Collections - * @method - * @alias forEach - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to - * each item in `coll`. Invoked with (item, callback). - * The array index is not passed to the iteratee. - * If you need the index, use `eachOf`. - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - * @example - * - * // assuming openFiles is an array of file names and saveFile is a function - * // to save the modified contents of that file: - * - * async.each(openFiles, saveFile, function(err){ - * // if any of the saves produced an error, err would equal that error - * }); - * - * // assuming openFiles is an array of file names - * async.each(openFiles, function(file, callback) { - * - * // Perform operation on file here. - * console.log('Processing file ' + file); - * - * if( file.length > 32 ) { - * console.log('This file name is too long'); - * callback('File name too long'); - * } else { - * // Do work to process file here - * console.log('File processed'); - * callback(); - * } - * }, function(err) { - * // if any of the file processing produced an error, err would equal that error - * if( err ) { - * // One of the iterations produced an error. - * // All processing will now stop. 
- * console.log('A file failed to process'); - * } else { - * console.log('All files have been processed successfully'); - * } - * }); - */ - function eachLimit(coll, iteratee, callback) { - return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); - } +/***/ }), - var each = awaitify(eachLimit, 3); +/***/ 63824: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /** - * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. - * - * @name eachLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfLimit`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - */ - function eachLimit$1(coll, limit, iteratee, callback) { - return eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); - } - var eachLimit$2 = awaitify(eachLimit$1, 4); +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - /** - * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. - * - * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item - * in series and therefore the iteratee functions will complete in order. - * @name eachSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfSeries`. - * Invoked with (item, callback). 
- * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - */ - function eachSeries(coll, iteratee, callback) { - return eachLimit$2(coll, 1, iteratee, callback) - } - var eachSeries$1 = awaitify(eachSeries, 3); - /** - * Wrap an async function and ensure it calls its callback on a later tick of - * the event loop. If the function already calls its callback on a next tick, - * no extra deferral is added. This is useful for preventing stack overflows - * (`RangeError: Maximum call stack size exceeded`) and generally keeping - * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) - * contained. ES2017 `async` functions are returned as-is -- they are immune - * to Zalgo's corrupting influences, as they always resolve on a later tick. - * - * @name ensureAsync - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - an async function, one that expects a node-style - * callback as its last argument. - * @returns {AsyncFunction} Returns a wrapped function with the exact same call - * signature as the function passed in. - * @example - * - * function sometimesAsync(arg, callback) { - * if (cache[arg]) { - * return callback(null, cache[arg]); // this would be synchronous!! - * } else { - * doSomeIO(arg, callback); // this IO would be asynchronous - * } - * } - * - * // this has a risk of stack overflows if many results are cached in a row - * async.mapSeries(args, sometimesAsync, done); - * - * // this will defer sometimesAsync's callback if necessary, - * // preventing stack overflows - * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); - */ - function ensureAsync(fn) { - if (isAsync(fn)) return fn; - return function (...args/*, callback*/) { - var callback = args.pop(); - var sync = true; - args.push((...innerArgs) => { - if (sync) { - setImmediate$1(() => callback(...innerArgs)); - } else { - callback(...innerArgs); - } - }); - fn.apply(this, args); - sync = false; - }; - } +/**/ - /** - * Returns `true` if every element in `coll` satisfies an async test. If any - * iteratee call returns `false`, the main `callback` is immediately called. - * - * @name every - * @static - * @memberOf module:Collections - * @method - * @alias all - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). 
- * @returns {Promise} a promise, if no callback provided - * @example - * - * async.every(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then every file exists - * }); - */ - function every(coll, iteratee, callback) { - return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) - } - var every$1 = awaitify(every, 3); +var Buffer = __nccwpck_require__(95237).Buffer; +/**/ - /** - * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. - * - * @name everyLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function everyLimit(coll, limit, iteratee, callback) { - return _createTester(bool => !bool, res => !res)(eachOfLimit(limit), coll, iteratee, callback) - } - var everyLimit$1 = awaitify(everyLimit, 4); +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; - /** - * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. - * - * @name everySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in series. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). 
- * @returns {Promise} a promise, if no callback provided - */ - function everySeries(coll, iteratee, callback) { - return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; } - var everySeries$1 = awaitify(everySeries, 3); + } +}; - function filterArray(eachfn, arr, iteratee, callback) { - var truthValues = new Array(arr.length); - eachfn(arr, (x, index, iterCb) => { - iteratee(x, (err, v) => { - truthValues[index] = !!v; - iterCb(err); - }); - }, err => { - if (err) return callback(err); - var results = []; - for (var i = 0; i < arr.length; i++) { - if (truthValues[i]) results.push(arr[i]); - } - callback(null, results); - }); - } +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} - function filterGeneric(eachfn, coll, iteratee, callback) { - var results = []; - eachfn(coll, (x, index, iterCb) => { - iteratee(x, (err, v) => { - if (err) return iterCb(err); - if (v) { - results.push({index, value: x}); - } - iterCb(err); - }); - }, err => { - if (err) return callback(err); - callback(null, results - .sort((a, b) => a.index - b.index) - .map(v => v.value)); - }); - } +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.s = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} - function _filter(eachfn, coll, iteratee, callback) { - var filter = isArrayLike(coll) ? filterArray : filterGeneric; - return filter(eachfn, coll, wrapAsync(iteratee), callback); - } +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; - /** - * Returns a new array of all the values in `coll` which pass an async truth - * test. This operation is performed in parallel, but the results array will be - * in the same order as the original. 
- * - * @name filter - * @static - * @memberOf module:Collections - * @method - * @alias select - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback provided - * @example - * - * async.filter(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, results) { - * // results now equals an array of the existing files - * }); - */ - function filter (coll, iteratee, callback) { - return _filter(eachOf$1, coll, iteratee, callback) - } - var filter$1 = awaitify(filter, 3); +StringDecoder.prototype.end = utf8End; - /** - * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a - * time. - * - * @name filterLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback provided - */ - function filterLimit (coll, limit, iteratee, callback) { - return _filter(eachOfLimit(limit), coll, iteratee, callback) +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. 
+function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; } - var filterLimit$1 = awaitify(filterLimit, 4); + return nb; + } + return 0; +} - /** - * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. - * - * @name filterSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results) - * @returns {Promise} a promise, if no callback provided - */ - function filterSeries (coll, iteratee, callback) { - return _filter(eachOfSeries$1, coll, iteratee, callback) +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; } - var filterSeries$1 = awaitify(filterSeries, 3); + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} - /** - * Calls the asynchronous function `fn` with a callback parameter that allows it - * to call itself again, in series, indefinitely. +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} - * If an error is passed to the callback then `errback` is called with the - * error, and execution stops, otherwise it will never be called. - * - * @name forever - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} fn - an async function to call repeatedly. - * Invoked with (next). 
- * @param {Function} [errback] - when `fn` passes an error to it's callback, - * this function will be called, and execution stops. Invoked with (err). - * @returns {Promise} a promise that rejects if an error occurs and an errback - * is not passed - * @example - * - * async.forever( - * function(next) { - * // next is suitable for passing to things that need a callback(err [, whatever]); - * // it will result in this function being called again. - * }, - * function(err) { - * // if next is called with a value in its first parameter, it will appear - * // in here as 'err', and execution will stop. - * } - * ); - */ - function forever(fn, errback) { - var done = onlyOnce(errback); - var task = wrapAsync(ensureAsync(fn)); +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} - function next(err) { - if (err) return done(err); - if (err === false) return; - task(next); - } - return next(); +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } } - var forever$1 = awaitify(forever, 2); + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} - /** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. - * - * @name groupByLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.groupBy]{@link module:Collections.groupBy} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. 
- * @returns {Promise} a promise, if no callback is passed - */ - function groupByLimit(coll, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return mapLimit$1(coll, limit, (val, iterCb) => { - _iteratee(val, (err, key) => { - if (err) return iterCb(err); - return iterCb(err, {key, val}); - }); - }, (err, mapResults) => { - var result = {}; - // from MDN, handle object having an `hasOwnProperty` prop - var {hasOwnProperty} = Object.prototype; +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} - for (var i = 0; i < mapResults.length; i++) { - if (mapResults[i]) { - var {key} = mapResults[i]; - var {val} = mapResults[i]; +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} - if (hasOwnProperty.call(result, key)) { - result[key].push(val); - } else { - result[key] = [val]; - } - } - } +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} - return callback(err, result); - }); - } +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} - var groupByLimit$1 = awaitify(groupByLimit, 4); +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} - /** - * Returns a new object, where each value corresponds to an array of items, from - * `coll`, that returned the corresponding key. That is, the keys of the object - * correspond to the values passed to the `iteratee` callback. - * - * Note: Since this function applies the `iteratee` to each item in parallel, - * there is no guarantee that the `iteratee` functions will complete in order. - * However, the values for each key in the `result` will be in the same order as - * the original `coll`. For Objects, the values will roughly be in the order of - * the original Objects' keys (but this can vary across JavaScript engines). - * - * @name groupBy - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. 
- * @returns {Promise} a promise, if no callback is passed - * @example - * - * async.groupBy(['userId1', 'userId2', 'userId3'], function(userId, callback) { - * db.findById(userId, function(err, user) { - * if (err) return callback(err); - * return callback(null, user.age); - * }); - * }, function(err, result) { - * // result is object containing the userIds grouped by age - * // e.g. { 30: ['userId1', 'userId3'], 42: ['userId2']}; - * }); - */ - function groupBy (coll, iteratee, callback) { - return groupByLimit$1(coll, Infinity, iteratee, callback) - } +/***/ }), - /** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. - * - * @name groupBySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.groupBy]{@link module:Collections.groupBy} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - * @returns {Promise} a promise, if no callback is passed - */ - function groupBySeries (coll, iteratee, callback) { - return groupByLimit$1(coll, 1, iteratee, callback) - } +/***/ 10165: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /** - * Logs the result of an `async` function to the `console`. Only works in - * Node.js or in browsers that support `console.log` and `console.error` (such - * as FF and Chrome). If multiple arguments are returned from the async - * function, `console.log` is called on each argument in order. - * - * @name log - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} function - The function you want to eventually apply - * all arguments to. - * @param {...*} arguments... - Any number of arguments to apply to the function. - * @example - * - * // in a module - * var hello = function(name, callback) { - * setTimeout(function() { - * callback(null, 'hello ' + name); - * }, 1000); - * }; - * - * // in the node repl - * node> async.log(hello, 'world'); - * 'hello world' - */ - var log = consoleFunc('log'); +"use strict"; - /** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a - * time. - * - * @name mapValuesLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.mapValues]{@link module:Collections.mapValues} - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). 
- * @returns {Promise} a promise, if no callback is passed - */ - function mapValuesLimit(obj, limit, iteratee, callback) { - callback = once(callback); - var newObj = {}; - var _iteratee = wrapAsync(iteratee); - return eachOfLimit(limit)(obj, (val, key, next) => { - _iteratee(val, key, (err, result) => { - if (err) return next(err); - newObj[key] = result; - next(err); - }); - }, err => callback(err, newObj)); - } +var EventEmitter = __nccwpck_require__(28614).EventEmitter +var util = __nccwpck_require__(31669) - var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); +var trackerId = 0 +var TrackerBase = module.exports = function (name) { + EventEmitter.call(this) + this.id = ++trackerId + this.name = name +} +util.inherits(TrackerBase, EventEmitter) - /** - * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. - * - * Produces a new Object by mapping each value of `obj` through the `iteratee` - * function. The `iteratee` is called each `value` and `key` from `obj` and a - * callback for when it has finished processing. Each of these callbacks takes - * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` - * passes an error to its callback, the main `callback` (for the `mapValues` - * function) is immediately called with the error. - * - * Note, the order of the keys in the result is not guaranteed. The keys will - * be roughly in the order they complete, (but this is very engine-specific) - * - * @name mapValues - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - * @returns {Promise} a promise, if no callback is passed - * @example - * - * async.mapValues({ - * f1: 'file1', - * f2: 'file2', - * f3: 'file3' - * }, function (file, key, callback) { - * fs.stat(file, callback); - * }, function(err, result) { - * // result is now a map of stats for each file, e.g. - * // { - * // f1: [stats for file1], - * // f2: [stats for file2], - * // f3: [stats for file3] - * // } - * }); - */ - function mapValues(obj, iteratee, callback) { - return mapValuesLimit$1(obj, Infinity, iteratee, callback) - } - /** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. - * - * @name mapValuesSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.mapValues]{@link module:Collections.mapValues} - * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). 
- * @returns {Promise} a promise, if no callback is passed - */ - function mapValuesSeries(obj, iteratee, callback) { - return mapValuesLimit$1(obj, 1, iteratee, callback) - } +/***/ }), - /** - * Caches the results of an async function. When creating a hash to store - * function results against, the callback is omitted from the hash and an - * optional hash function can be used. - * - * **Note: if the async function errs, the result will not be cached and - * subsequent calls will call the wrapped function.** - * - * If no hash function is specified, the first argument is used as a hash key, - * which may work reasonably if it is a string or a data type that converts to a - * distinct string. Note that objects and arrays will not behave reasonably. - * Neither will cases where the other arguments are significant. In such cases, - * specify your own hash function. - * - * The cache of results is exposed as the `memo` property of the function - * returned by `memoize`. - * - * @name memoize - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - The async function to proxy and cache results from. - * @param {Function} hasher - An optional function for generating a custom hash - * for storing results. It has all the arguments applied to it apart from the - * callback, and must be synchronous. - * @returns {AsyncFunction} a memoized version of `fn` - * @example - * - * var slow_fn = function(name, callback) { - * // do something - * callback(null, result); - * }; - * var fn = async.memoize(slow_fn); - * - * // fn can now be used as if it were slow_fn - * fn('some name', function() { - * // callback - * }); - */ - function memoize(fn, hasher = v => v) { - var memo = Object.create(null); - var queues = Object.create(null); - var _fn = wrapAsync(fn); - var memoized = initialParams((args, callback) => { - var key = hasher(...args); - if (key in memo) { - setImmediate$1(() => callback(null, ...memo[key])); - } else if (key in queues) { - queues[key].push(callback); - } else { - queues[key] = [callback]; - _fn(...args, (err, ...resultArgs) => { - // #1465 don't memoize if an error occurred - if (!err) { - memo[key] = resultArgs; - } - var q = queues[key]; - delete queues[key]; - for (var i = 0, l = q.length; i < l; i++) { - q[i](err, ...resultArgs); - } - }); - } - }); - memoized.memo = memo; - memoized.unmemoized = fn; - return memoized; +/***/ 60660: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var util = __nccwpck_require__(31669) +var TrackerBase = __nccwpck_require__(10165) +var Tracker = __nccwpck_require__(8074) +var TrackerStream = __nccwpck_require__(31375) + +var TrackerGroup = module.exports = function (name) { + TrackerBase.call(this, name) + this.parentGroup = null + this.trackers = [] + this.completion = {} + this.weight = {} + this.totalWeight = 0 + this.finished = false + this.bubbleChange = bubbleChange(this) +} +util.inherits(TrackerGroup, TrackerBase) + +function bubbleChange (trackerGroup) { + return function (name, completed, tracker) { + trackerGroup.completion[tracker.id] = completed + if (trackerGroup.finished) return + trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) + } +} + +TrackerGroup.prototype.nameInTree = function () { + var names = [] + var from = this + while (from) { + names.unshift(from.name) + from = from.parentGroup + } + return names.join('/') +} + +TrackerGroup.prototype.addUnit = function (unit, weight) { + if (unit.addUnit) 
{ + var toTest = this + while (toTest) { + if (unit === toTest) { + throw new Error( + 'Attempted to add tracker group ' + + unit.name + ' to tree that already includes it ' + + this.nameInTree(this)) + } + toTest = toTest.parentGroup } + unit.parentGroup = this + } + this.weight[unit.id] = weight || 1 + this.totalWeight += this.weight[unit.id] + this.trackers.push(unit) + this.completion[unit.id] = unit.completed() + unit.on('change', this.bubbleChange) + if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit) + return unit +} - /** - * Calls `callback` on a later loop around the event loop. In Node.js this just - * calls `process.nextTick`. In the browser it will use `setImmediate` if - * available, otherwise `setTimeout(callback, 0)`, which means other higher - * priority events may precede the execution of `callback`. - * - * This is used internally for browser-compatibility purposes. - * - * @name nextTick - * @static - * @memberOf module:Utils - * @method - * @see [async.setImmediate]{@link module:Utils.setImmediate} - * @category Util - * @param {Function} callback - The function to call on a later loop around - * the event loop. Invoked with (args...). - * @param {...*} args... - any number of additional arguments to pass to the - * callback on the next tick. - * @example - * - * var call_order = []; - * async.nextTick(function() { - * call_order.push('two'); - * // call_order now equals ['one','two'] - * }); - * call_order.push('one'); - * - * async.setImmediate(function (a, b, c) { - * // a, b, and c equal 1, 2, and 3 - * }, 1, 2, 3); - */ - var _defer$1; +TrackerGroup.prototype.completed = function () { + if (this.trackers.length === 0) return 0 + var valPerWeight = 1 / this.totalWeight + var completed = 0 + for (var ii = 0; ii < this.trackers.length; ii++) { + var trackerId = this.trackers[ii].id + completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId] + } + return completed +} - if (hasNextTick) { - _defer$1 = process.nextTick; - } else if (hasSetImmediate) { - _defer$1 = setImmediate; +TrackerGroup.prototype.newGroup = function (name, weight) { + return this.addUnit(new TrackerGroup(name), weight) +} + +TrackerGroup.prototype.newItem = function (name, todo, weight) { + return this.addUnit(new Tracker(name, todo), weight) +} + +TrackerGroup.prototype.newStream = function (name, todo, weight) { + return this.addUnit(new TrackerStream(name, todo), weight) +} + +TrackerGroup.prototype.finish = function () { + this.finished = true + if (!this.trackers.length) this.addUnit(new Tracker(), 1, true) + for (var ii = 0; ii < this.trackers.length; ii++) { + var tracker = this.trackers[ii] + tracker.finish() + tracker.removeListener('change', this.bubbleChange) + } + this.emit('change', this.name, 1, this) +} + +var buffer = ' ' +TrackerGroup.prototype.debug = function (depth) { + depth = depth || 0 + var indent = depth ? buffer.substr(0, depth) : '' + var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' + this.trackers.forEach(function (tracker) { + if (tracker instanceof TrackerGroup) { + output += tracker.debug(depth + 1) } else { - _defer$1 = fallback; + output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' } + }) + return output +} - var nextTick = wrap(_defer$1); - var parallel = awaitify((eachfn, tasks, callback) => { - var results = isArrayLike(tasks) ? 
[] : {}; +/***/ }), - eachfn(tasks, (task, key, taskCb) => { - wrapAsync(task)((err, ...result) => { - if (result.length < 2) { - [result] = result; - } - results[key] = result; - taskCb(err); - }); - }, err => callback(err, results)); - }, 3); +/***/ 31375: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /** - * Run the `tasks` collection of functions in parallel, without waiting until - * the previous function has completed. If any of the functions pass an error to - * its callback, the main `callback` is immediately called with the value of the - * error. Once the `tasks` have completed, the results are passed to the final - * `callback` as an array. - * - * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about - * parallel execution of code. If your tasks do not use any timers or perform - * any I/O, they will actually be executed in series. Any synchronous setup - * sections for each task will happen one after the other. JavaScript remains - * single-threaded. - * - * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the - * execution of other tasks when a task fails. - * - * It is also possible to use an object instead of an array. Each property will - * be run as a function and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.parallel}. - * - * @name parallel - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of - * [async functions]{@link AsyncFunction} to run. - * Each async function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed successfully. This function gets a results array - * (or object) containing all the result arguments passed to the task callbacks. - * Invoked with (err, results). - * @returns {Promise} a promise, if a callback is not passed - * - * @example - * async.parallel([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ], - * // optional callback - * function(err, results) { - * // the results array will equal ['one','two'] even though - * // the second function had a shorter timeout. - * }); - * - * // an example using an object instead of an array - * async.parallel({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback) { - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * // results is now equals to: {one: 1, two: 2} - * }); - */ - function parallel$1(tasks, callback) { - return parallel(eachOf$1, tasks, callback); - } +"use strict"; - /** - * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a - * time. - * - * @name parallelLimit - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.parallel]{@link module:ControlFlow.parallel} - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of - * [async functions]{@link AsyncFunction} to run. - * Each async function can complete with any number of optional `result` values. 
- * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed successfully. This function gets a results array - * (or object) containing all the result arguments passed to the task callbacks. - * Invoked with (err, results). - * @returns {Promise} a promise, if a callback is not passed - */ - function parallelLimit(tasks, limit, callback) { - return parallel(eachOfLimit(limit), tasks, callback); - } +var util = __nccwpck_require__(31669) +var stream = __nccwpck_require__(64043) +var delegate = __nccwpck_require__(61318) +var Tracker = __nccwpck_require__(8074) - /** - * A queue of tasks for the worker function to complete. - * @typedef {Iterable} QueueObject - * @memberOf module:ControlFlow - * @property {Function} length - a function returning the number of items - * waiting to be processed. Invoke with `queue.length()`. - * @property {boolean} started - a boolean indicating whether or not any - * items have been pushed and processed by the queue. - * @property {Function} running - a function returning the number of items - * currently being processed. Invoke with `queue.running()`. - * @property {Function} workersList - a function returning the array of items - * currently being processed. Invoke with `queue.workersList()`. - * @property {Function} idle - a function returning false if there are items - * waiting or being processed, or true if not. Invoke with `queue.idle()`. - * @property {number} concurrency - an integer for determining how many `worker` - * functions should be run in parallel. This property can be changed after a - * `queue` is created to alter the concurrency on-the-fly. - * @property {number} payload - an integer that specifies how many items are - * passed to the worker function at a time. only applies if this is a - * [cargo]{@link module:ControlFlow.cargo} object - * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` - * once the `worker` has finished processing the task. Instead of a single task, - * a `tasks` array can be submitted. The respective callback is used for every - * task in the list. Invoke with `queue.push(task, [callback])`, - * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. - * Invoke with `queue.unshift(task, [callback])`. - * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns - * a promise that rejects if an error occurs. - * @property {AsyncFunction} unshirtAsync - the same as `q.unshift`, except this returns - * a promise that rejects if an error occurs. - * @property {Function} remove - remove items from the queue that match a test - * function. The test function will be passed an object with a `data` property, - * and a `priority` property, if this is a - * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. - * Invoked with `queue.remove(testFn)`, where `testFn` is of the form - * `function ({data, priority}) {}` and returns a Boolean. - * @property {Function} saturated - a function that sets a callback that is - * called when the number of running workers hits the `concurrency` limit, and - * further tasks will be queued. If the callback is omitted, `q.saturated()` - * returns a promise for the next occurrence. 
- * @property {Function} unsaturated - a function that sets a callback that is - * called when the number of running workers is less than the `concurrency` & - * `buffer` limits, and further tasks will not be queued. If the callback is - * omitted, `q.unsaturated()` returns a promise for the next occurrence. - * @property {number} buffer - A minimum threshold buffer in order to say that - * the `queue` is `unsaturated`. - * @property {Function} empty - a function that sets a callback that is called - * when the last item from the `queue` is given to a `worker`. If the callback - * is omitted, `q.empty()` returns a promise for the next occurrence. - * @property {Function} drain - a function that sets a callback that is called - * when the last item from the `queue` has returned from the `worker`. If the - * callback is omitted, `q.drain()` returns a promise for the next occurrence. - * @property {Function} error - a function that sets a callback that is called - * when a task errors. Has the signature `function(error, task)`. If the - * callback is omitted, `error()` returns a promise that rejects on the next - * error. - * @property {boolean} paused - a boolean for determining whether the queue is - * in a paused state. - * @property {Function} pause - a function that pauses the processing of tasks - * until `resume()` is called. Invoke with `queue.pause()`. - * @property {Function} resume - a function that resumes the processing of - * queued tasks when the queue is paused. Invoke with `queue.resume()`. - * @property {Function} kill - a function that removes the `drain` callback and - * empties remaining tasks from the queue forcing it to go idle. No more tasks - * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. - * - * @example - * const q = aync.queue(worker, 2) - * q.push(item1) - * q.push(item2) - * q.push(item3) - * // queues are iterable, spread into an array to inspect - * const items = [...q] // [item1, item2, item3] - * // or use for of - * for (let item of q) { - * console.log(item) - * } - * - * q.drain(() => { - * console.log('all done') - * }) - * // or - * await q.drain() - */ +var TrackerStream = module.exports = function (name, size, options) { + stream.Transform.call(this, options) + this.tracker = new Tracker(name, size) + this.name = name + this.id = this.tracker.id + this.tracker.on('change', delegateChange(this)) +} +util.inherits(TrackerStream, stream.Transform) - /** - * Creates a `queue` object with the specified `concurrency`. Tasks added to the - * `queue` are processed in parallel (up to the `concurrency` limit). If all - * `worker`s are in progress, the task is queued until one becomes available. - * Once a `worker` completes a `task`, that `task`'s callback is called. - * - * @name queue - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. Invoked with (task, callback). - * @param {number} [concurrency=1] - An `integer` for determining how many - * `worker` functions should be run in parallel. If omitted, the concurrency - * defaults to `1`. If the concurrency is `0`, an error is thrown. - * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be - * attached as certain properties to listen for specific events during the - * lifecycle of the queue. 
- * @example - * - * // create a queue object with concurrency 2 - * var q = async.queue(function(task, callback) { - * console.log('hello ' + task.name); - * callback(); - * }, 2); - * - * // assign a callback - * q.drain(function() { - * console.log('all items have been processed'); - * }); - * // or await the end - * await q.drain() - * - * // assign an error callback - * q.error(function(err, task) { - * console.error('task experienced an error'); - * }); - * - * // add some items to the queue - * q.push({name: 'foo'}, function(err) { - * console.log('finished processing foo'); - * }); - * // callback is optional - * q.push({name: 'bar'}); - * - * // add some items to the queue (batch-wise) - * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { - * console.log('finished processing item'); - * }); - * - * // add some items to the front of the queue - * q.unshift({name: 'bar'}, function (err) { - * console.log('finished processing bar'); - * }); - */ - function queue$1 (worker, concurrency) { - var _worker = wrapAsync(worker); - return queue((items, cb) => { - _worker(items[0], cb); - }, concurrency, 1); - } +function delegateChange (trackerStream) { + return function (name, completion, tracker) { + trackerStream.emit('change', name, completion, trackerStream) + } +} - // Binary min-heap implementation used for priority queue. - // Implementation is stable, i.e. push time is considered for equal priorities - class Heap { - constructor() { - this.heap = []; - this.pushCount = Number.MIN_SAFE_INTEGER; - } +TrackerStream.prototype._transform = function (data, encoding, cb) { + this.tracker.completeWork(data.length ? data.length : 1) + this.push(data) + cb() +} - get length() { - return this.heap.length; - } +TrackerStream.prototype._flush = function (cb) { + this.tracker.finish() + cb() +} - empty () { - this.heap = []; - return this; - } +delegate(TrackerStream.prototype, 'tracker') + .method('completed') + .method('addWork') + .method('finish') - percUp(index) { - let p; - while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { - let t = this.heap[index]; - this.heap[index] = this.heap[p]; - this.heap[p] = t; +/***/ }), - index = p; - } - } +/***/ 8074: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - percDown(index) { - let l; +"use strict"; - while ((l=leftChi(index)) < this.heap.length) { - if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { - l = l+1; - } +var util = __nccwpck_require__(31669) +var TrackerBase = __nccwpck_require__(10165) - if (smaller(this.heap[index], this.heap[l])) { - break; - } +var Tracker = module.exports = function (name, todo) { + TrackerBase.call(this, name) + this.workDone = 0 + this.workTodo = todo || 0 +} +util.inherits(Tracker, TrackerBase) - let t = this.heap[index]; - this.heap[index] = this.heap[l]; - this.heap[l] = t; +Tracker.prototype.completed = function () { + return this.workTodo === 0 ? 
0 : this.workDone / this.workTodo +} - index = l; - } - } +Tracker.prototype.addWork = function (work) { + this.workTodo += work + this.emit('change', this.name, this.completed(), this) +} - push(node) { - node.pushCount = ++this.pushCount; - this.heap.push(node); - this.percUp(this.heap.length-1); - } +Tracker.prototype.completeWork = function (work) { + this.workDone += work + if (this.workDone > this.workTodo) this.workDone = this.workTodo + this.emit('change', this.name, this.completed(), this) +} - unshift(node) { - return this.heap.push(node); - } +Tracker.prototype.finish = function () { + this.workTodo = this.workDone = 1 + this.emit('change', this.name, 1, this) +} - shift() { - let [top] = this.heap; - this.heap[0] = this.heap[this.heap.length-1]; - this.heap.pop(); - this.percDown(0); +/***/ }), - return top; - } +/***/ 99348: +/***/ ((module) => { - toArray() { - return [...this]; - } +// Copyright 2011 Mark Cavage All rights reserved. - *[Symbol.iterator] () { - for (let i = 0; i < this.heap.length; i++) { - yield this.heap[i].data; - } - } - remove (testFn) { - let j = 0; - for (let i = 0; i < this.heap.length; i++) { - if (!testFn(this.heap[i])) { - this.heap[j] = this.heap[i]; - j++; - } - } +module.exports = { - this.heap.splice(j); + newInvalidAsn1Error: function (msg) { + var e = new Error(); + e.name = 'InvalidAsn1Error'; + e.message = msg || ''; + return e; + } - for (let i = parent(this.heap.length-1); i >= 0; i--) { - this.percDown(i); - } +}; - return this; - } - } - function leftChi(i) { - return (i<<1)+1; - } +/***/ }), - function parent(i) { - return ((i+1)>>1)-1; - } +/***/ 194: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - function smaller(x, y) { - if (x.priority !== y.priority) { - return x.priority < y.priority; - } - else { - return x.pushCount < y.pushCount; - } - } +// Copyright 2011 Mark Cavage All rights reserved. - /** - * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and - * completed in ascending priority order. - * - * @name priorityQueue - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. - * Invoked with (task, callback). - * @param {number} concurrency - An `integer` for determining how many `worker` - * functions should be run in parallel. If omitted, the concurrency defaults to - * `1`. If the concurrency is `0`, an error is thrown. - * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are two - * differences between `queue` and `priorityQueue` objects: - * * `push(task, priority, [callback])` - `priority` should be a number. If an - * array of `tasks` is given, all tasks will be assigned the same priority. - * * The `unshift` method was removed. 
- */ - function priorityQueue(worker, concurrency) { - // Start with a normal queue - var q = queue$1(worker, concurrency); +var errors = __nccwpck_require__(99348); +var types = __nccwpck_require__(42473); - q._tasks = new Heap(); +var Reader = __nccwpck_require__(20290); +var Writer = __nccwpck_require__(43200); - // Override push to accept second parameter representing priority - q.push = function(data, priority = 0, callback = () => {}) { - if (typeof callback !== 'function') { - throw new Error('task callback must be a function'); - } - q.started = true; - if (!Array.isArray(data)) { - data = [data]; - } - if (data.length === 0 && q.idle()) { - // call drain immediately if there are no tasks - return setImmediate$1(() => q.drain()); - } - for (var i = 0, l = data.length; i < l; i++) { - var item = { - data: data[i], - priority, - callback - }; +// --- Exports - q._tasks.push(item); - } +module.exports = { - setImmediate$1(q.process); - }; + Reader: Reader, - // Remove unshift function - delete q.unshift; + Writer: Writer - return q; - } +}; - /** - * Runs the `tasks` array of functions in parallel, without waiting until the - * previous function has completed. Once any of the `tasks` complete or pass an - * error to its callback, the main `callback` is immediately called. It's - * equivalent to `Promise.race()`. - * - * @name race - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} - * to run. Each function can complete with an optional `result` value. - * @param {Function} callback - A callback to run once any of the functions have - * completed. This function gets an error or result from the first function that - * completed. Invoked with (err, result). - * @returns undefined - * @example - * - * async.race([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ], - * // main callback - * function(err, result) { - * // the result will be equal to 'two' as it finishes earlier - * }); - */ - function race(tasks, callback) { - callback = once(callback); - if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); - if (!tasks.length) return callback(); - for (var i = 0, l = tasks.length; i < l; i++) { - wrapAsync(tasks[i])(callback); - } - } +for (var t in types) { + if (types.hasOwnProperty(t)) + module.exports[t] = types[t]; +} +for (var e in errors) { + if (errors.hasOwnProperty(e)) + module.exports[e] = errors[e]; +} - var race$1 = awaitify(race, 2); - /** - * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. - * - * @name reduceRight - * @static - * @memberOf module:Collections - * @method - * @see [async.reduce]{@link module:Collections.reduce} - * @alias foldr - * @category Collection - * @param {Array} array - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction. - * If the iteratee complete with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). 
- * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. Invoked with - * (err, result). - * @returns {Promise} a promise, if no callback is passed - */ - function reduceRight (array, memo, iteratee, callback) { - var reversed = [...array].reverse(); - return reduce$1(reversed, memo, iteratee, callback); - } +/***/ }), - /** - * Wraps the async function in another function that always completes with a - * result object, even when it errors. - * - * The result object has either the property `error` or `value`. - * - * @name reflect - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - The async function you want to wrap - * @returns {Function} - A function that always passes null to it's callback as - * the error. The second argument to the callback will be an `object` with - * either an `error` or a `value` property. - * @example - * - * async.parallel([ - * async.reflect(function(callback) { - * // do some stuff ... - * callback(null, 'one'); - * }), - * async.reflect(function(callback) { - * // do some more stuff but error ... - * callback('bad stuff happened'); - * }), - * async.reflect(function(callback) { - * // do some more stuff ... - * callback(null, 'two'); - * }) - * ], - * // optional callback - * function(err, results) { - * // values - * // results[0].value = 'one' - * // results[1].error = 'bad stuff happened' - * // results[2].value = 'two' - * }); - */ - function reflect(fn) { - var _fn = wrapAsync(fn); - return initialParams(function reflectOn(args, reflectCallback) { - args.push((error, ...cbArgs) => { - let retVal = {}; - if (error) { - retVal.error = error; - } - if (cbArgs.length > 0){ - var value = cbArgs; - if (cbArgs.length <= 1) { - [value] = cbArgs; - } - retVal.value = value; - } - reflectCallback(null, retVal); - }); +/***/ 20290: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return _fn.apply(this, args); - }); - } +// Copyright 2011 Mark Cavage All rights reserved. - /** - * A helper function that wraps an array or an object of functions with `reflect`. - * - * @name reflectAll - * @static - * @memberOf module:Utils - * @method - * @see [async.reflect]{@link module:Utils.reflect} - * @category Util - * @param {Array|Object|Iterable} tasks - The collection of - * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. - * @returns {Array} Returns an array of async functions, each wrapped in - * `async.reflect` - * @example - * - * let tasks = [ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * // do some more stuff but error ... 
- * callback(new Error('bad stuff happened')); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ]; - * - * async.parallel(async.reflectAll(tasks), - * // optional callback - * function(err, results) { - * // values - * // results[0].value = 'one' - * // results[1].error = Error('bad stuff happened') - * // results[2].value = 'two' - * }); - * - * // an example using an object instead of an array - * let tasks = { - * one: function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * two: function(callback) { - * callback('two'); - * }, - * three: function(callback) { - * setTimeout(function() { - * callback(null, 'three'); - * }, 100); - * } - * }; - * - * async.parallel(async.reflectAll(tasks), - * // optional callback - * function(err, results) { - * // values - * // results.one.value = 'one' - * // results.two.error = 'two' - * // results.three.value = 'three' - * }); - */ - function reflectAll(tasks) { - var results; - if (Array.isArray(tasks)) { - results = tasks.map(reflect); - } else { - results = {}; - Object.keys(tasks).forEach(key => { - results[key] = reflect.call(this, tasks[key]); - }); - } - return results; - } +var assert = __nccwpck_require__(42357); +var Buffer = __nccwpck_require__(15118).Buffer; - function reject(eachfn, arr, _iteratee, callback) { - const iteratee = wrapAsync(_iteratee); - return _filter(eachfn, arr, (value, cb) => { - iteratee(value, (err, v) => { - cb(err, !v); - }); - }, callback); - } +var ASN1 = __nccwpck_require__(42473); +var errors = __nccwpck_require__(99348); - /** - * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. - * - * @name reject - * @static - * @memberOf module:Collections - * @method - * @see [async.filter]{@link module:Collections.filter} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - * @example - * - * async.reject(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, results) { - * // results now equals an array of missing files - * createFiles(results); - * }); - */ - function reject$1 (coll, iteratee, callback) { - return reject(eachOf$1, coll, iteratee, callback) - } - var reject$2 = awaitify(reject$1, 3); - /** - * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a - * time. - * - * @name rejectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). 
- * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - */ - function rejectLimit (coll, limit, iteratee, callback) { - return reject(eachOfLimit(limit), coll, iteratee, callback) - } - var rejectLimit$1 = awaitify(rejectLimit, 4); +// --- Globals - /** - * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. - * - * @name rejectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - */ - function rejectSeries (coll, iteratee, callback) { - return reject(eachOfSeries$1, coll, iteratee, callback) - } - var rejectSeries$1 = awaitify(rejectSeries, 3); +var newInvalidAsn1Error = errors.newInvalidAsn1Error; - function constant$1(value) { - return function () { - return value; - } - } - /** - * Attempts to get a successful response from `task` no more than `times` times - * before returning an error. If the task is successful, the `callback` will be - * passed the result of the successful task. If all attempts fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name retry - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @see [async.retryable]{@link module:ControlFlow.retryable} - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an - * object with `times` and `interval` or a number. - * * `times` - The number of attempts to make before giving up. The default - * is `5`. - * * `interval` - The time to wait between retries, in milliseconds. The - * default is `0`. The interval may also be specified as a function of the - * retry count (see example). - * * `errorFilter` - An optional synchronous function that is invoked on - * erroneous result. If it returns `true` the retry attempts will continue; - * if the function returns `false` the retry flow is aborted with the current - * attempt's error and result being returned to the final callback. - * Invoked with (err). - * * If `opts` is a number, the number specifies the number of times to retry, - * with the default interval of `0`. - * @param {AsyncFunction} task - An async function to retry. - * Invoked with (callback). - * @param {Function} [callback] - An optional callback which is called when the - * task has succeeded, or after the final failed attempt. It receives the `err` - * and `result` arguments of the last attempt at completing the `task`. Invoked - * with (err, results). 
- * @returns {Promise} a promise if no callback provided - * - * @example - * - * // The `retry` function can be used as a stand-alone control flow by passing - * // a callback, as shown below: - * - * // try calling apiMethod 3 times - * async.retry(3, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 3 times, waiting 200 ms between each retry - * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 10 times with exponential backoff - * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) - * async.retry({ - * times: 10, - * interval: function(retryCount) { - * return 50 * Math.pow(2, retryCount); - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod the default 5 times no delay between each retry - * async.retry(apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod only when error condition satisfies, all other - * // errors will abort the retry control flow and return to final callback - * async.retry({ - * errorFilter: function(err) { - * return err.message === 'Temporary error'; // only retry on a specific error - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // to retry individual methods that are not as reliable within other - * // control flow functions, use the `retryable` wrapper: - * async.auto({ - * users: api.getUsers.bind(api), - * payments: async.retryable(3, api.getPayments.bind(api)) - * }, function(err, results) { - * // do something with the results - * }); - * - */ - const DEFAULT_TIMES = 5; - const DEFAULT_INTERVAL = 0; - function retry(opts, task, callback) { - var options = { - times: DEFAULT_TIMES, - intervalFunc: constant$1(DEFAULT_INTERVAL) - }; +// --- API - if (arguments.length < 3 && typeof opts === 'function') { - callback = task || promiseCallback(); - task = opts; - } else { - parseTimes(options, opts); - callback = callback || promiseCallback(); - } +function Reader(data) { + if (!data || !Buffer.isBuffer(data)) + throw new TypeError('data must be a node Buffer'); - if (typeof task !== 'function') { - throw new Error("Invalid arguments for async.retry"); - } + this._buf = data; + this._size = data.length; - var _task = wrapAsync(task); + // These hold the "current" state + this._len = 0; + this._offset = 0; +} - var attempt = 1; - function retryAttempt() { - _task((err, ...args) => { - if (err === false) return - if (err && attempt++ < options.times && - (typeof options.errorFilter != 'function' || - options.errorFilter(err))) { - setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); - } else { - callback(err, ...args); - } - }); - } +Object.defineProperty(Reader.prototype, 'length', { + enumerable: true, + get: function () { return (this._len); } +}); - retryAttempt(); - return callback[PROMISE_SYMBOL] - } +Object.defineProperty(Reader.prototype, 'offset', { + enumerable: true, + get: function () { return (this._offset); } +}); - function parseTimes(acc, t) { - if (typeof t === 'object') { - acc.times = +t.times || DEFAULT_TIMES; +Object.defineProperty(Reader.prototype, 'remain', { + get: function () { return (this._size - this._offset); } +}); - acc.intervalFunc = typeof t.interval === 'function' ? 
- t.interval : - constant$1(+t.interval || DEFAULT_INTERVAL); +Object.defineProperty(Reader.prototype, 'buffer', { + get: function () { return (this._buf.slice(this._offset)); } +}); - acc.errorFilter = t.errorFilter; - } else if (typeof t === 'number' || typeof t === 'string') { - acc.times = +t || DEFAULT_TIMES; - } else { - throw new Error("Invalid arguments for async.retry"); - } - } - /** - * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method - * wraps a task and makes it retryable, rather than immediately calling it - * with retries. - * - * @name retryable - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.retry]{@link module:ControlFlow.retry} - * @category Control Flow - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional - * options, exactly the same as from `retry`, except for a `opts.arity` that - * is the arity of the `task` function, defaulting to `task.length` - * @param {AsyncFunction} task - the asynchronous function to wrap. - * This function will be passed any arguments passed to the returned wrapper. - * Invoked with (...args, callback). - * @returns {AsyncFunction} The wrapped function, which when invoked, will - * retry on an error, based on the parameters specified in `opts`. - * This function will accept the same parameters as `task`. - * @example - * - * async.auto({ - * dep1: async.retryable(3, getFromFlakyService), - * process: ["dep1", async.retryable(3, function (results, cb) { - * maybeProcessData(results.dep1, cb); - * })] - * }, callback); - */ - function retryable (opts, task) { - if (!task) { - task = opts; - opts = null; - } - let arity = (opts && opts.arity) || task.length; - if (isAsync(task)) { - arity += 1; - } - var _task = wrapAsync(task); - return initialParams((args, callback) => { - if (args.length < arity - 1 || callback == null) { - args.push(callback); - callback = promiseCallback(); - } - function taskFn(cb) { - _task(...args, cb); - } +/** + * Reads a single byte and advances offset; you can pass in `true` to make this + * a "peek" operation (i.e., get the byte, but don't advance the offset). + * + * @param {Boolean} peek true means don't move offset. + * @return {Number} the next byte, null if not enough data. + */ +Reader.prototype.readByte = function (peek) { + if (this._size - this._offset < 1) + return null; - if (opts) retry(opts, taskFn, callback); - else retry(taskFn, callback); + var b = this._buf[this._offset] & 0xff; - return callback[PROMISE_SYMBOL] - }); - } + if (!peek) + this._offset += 1; - /** - * Run the functions in the `tasks` collection in series, each one running once - * the previous function has completed. If any functions in the series pass an - * error to its callback, no more functions are run, and `callback` is - * immediately called with the value of the error. Otherwise, `callback` - * receives an array of results when `tasks` have completed. - * - * It is also possible to use an object instead of an array. Each property will - * be run as a function, and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.series}. - * - * **Note** that while many implementations preserve the order of object - * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) - * explicitly states that - * - * > The mechanics and order of enumerating the properties is not specified. 
- * - * So if you rely on the order in which your series of functions are executed, - * and want this to work on all platforms, consider using an array. - * - * @name series - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing - * [async functions]{@link AsyncFunction} to run in series. - * Each function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This function gets a results array (or object) - * containing all the result arguments passed to the `task` callbacks. Invoked - * with (err, result). - * @return {Promise} a promise, if no callback is passed - * @example - * async.series([ - * function(callback) { - * // do some stuff ... - * callback(null, 'one'); - * }, - * function(callback) { - * // do some more stuff ... - * callback(null, 'two'); - * } - * ], - * // optional callback - * function(err, results) { - * // results is now equal to ['one', 'two'] - * }); - * - * async.series({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback){ - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * // results is now equal to: {one: 1, two: 2} - * }); - */ - function series(tasks, callback) { - return parallel(eachOfSeries$1, tasks, callback); - } + return b; +}; - /** - * Returns `true` if at least one element in the `coll` satisfies an async test. - * If any iteratee call returns `true`, the main `callback` is immediately - * called. - * - * @name some - * @static - * @memberOf module:Collections - * @method - * @alias any - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - * @example - * - * async.some(['file1','file2','file3'], function(filePath, callback) { - * fs.access(filePath, function(err) { - * callback(null, !err) - * }); - * }, function(err, result) { - * // if result is true then at least one of the files exists - * }); - */ - function some(coll, iteratee, callback) { - return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) - } - var some$1 = awaitify(some, 3); - /** - * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. - * - * @name someLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anyLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. 
- * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function someLimit(coll, limit, iteratee, callback) { - return _createTester(Boolean, res => res)(eachOfLimit(limit), coll, iteratee, callback) - } - var someLimit$1 = awaitify(someLimit, 4); +Reader.prototype.peek = function () { + return this.readByte(true); +}; - /** - * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. - * - * @name someSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anySeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in series. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function someSeries(coll, iteratee, callback) { - return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) - } - var someSeries$1 = awaitify(someSeries, 3); - /** - * Sorts a list by the results of running each `coll` value through an async - * `iteratee`. - * - * @name sortBy - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a value to use as the sort criteria as - * its `result`. - * Invoked with (item, callback). - * @param {Function} callback - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is the items - * from the original `coll` sorted by the values returned by the `iteratee` - * calls. Invoked with (err, results). 
- * @returns {Promise} a promise, if no callback passed - * @example - * - * async.sortBy(['file1','file2','file3'], function(file, callback) { - * fs.stat(file, function(err, stats) { - * callback(err, stats.mtime); - * }); - * }, function(err, results) { - * // results is now the original array of files sorted by - * // modified date - * }); - * - * // By modifying the callback parameter the - * // sorting order can be influenced: - * - * // ascending order - * async.sortBy([1,9,3,5], function(x, callback) { - * callback(null, x); - * }, function(err,result) { - * // result callback - * }); - * - * // descending order - * async.sortBy([1,9,3,5], function(x, callback) { - * callback(null, x*-1); //<- x*-1 instead of x, turns the order around - * }, function(err,result) { - * // result callback - * }); - */ - function sortBy (coll, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return map$1(coll, (x, iterCb) => { - _iteratee(x, (err, criteria) => { - if (err) return iterCb(err); - iterCb(err, {value: x, criteria}); - }); - }, (err, results) => { - if (err) return callback(err); - callback(null, results.sort(comparator).map(v => v.value)); - }); +/** + * Reads a (potentially) variable length off the BER buffer. This call is + * not really meant to be called directly, as callers have to manipulate + * the internal buffer afterwards. + * + * As a result of this call, you can call `Reader.length`, until the + * next thing called that does a readLength. + * + * @return {Number} the amount of offset to advance the buffer. + * @throws {InvalidAsn1Error} on bad ASN.1 + */ +Reader.prototype.readLength = function (offset) { + if (offset === undefined) + offset = this._offset; - function comparator(left, right) { - var a = left.criteria, b = right.criteria; - return a < b ? -1 : a > b ? 1 : 0; - } - } - var sortBy$1 = awaitify(sortBy, 3); + if (offset >= this._size) + return null; - /** - * Sets a time limit on an asynchronous function. If the function does not call - * its callback within the specified milliseconds, it will be called with a - * timeout error. The code property for the error object will be `'ETIMEDOUT'`. - * - * @name timeout - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} asyncFn - The async function to limit in time. - * @param {number} milliseconds - The specified time limit. - * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) - * to timeout Error for more information.. - * @returns {AsyncFunction} Returns a wrapped function that can be used with any - * of the control flow functions. - * Invoke this function with the same parameters as you would `asyncFunc`. - * @example - * - * function myFunction(foo, callback) { - * doAsyncTask(foo, function(err, data) { - * // handle errors - * if (err) return callback(err); - * - * // do some stuff ... 
- * - * // return processed data - * return callback(null, data); - * }); - * } - * - * var wrapped = async.timeout(myFunction, 1000); - * - * // call `wrapped` as you would `myFunction` - * wrapped({ bar: 'bar' }, function(err, data) { - * // if `myFunction` takes < 1000 ms to execute, `err` - * // and `data` will have their expected values - * - * // else `err` will be an Error with the code 'ETIMEDOUT' - * }); - */ - function timeout(asyncFn, milliseconds, info) { - var fn = wrapAsync(asyncFn); + var lenB = this._buf[offset++] & 0xff; + if (lenB === null) + return null; - return initialParams((args, callback) => { - var timedOut = false; - var timer; + if ((lenB & 0x80) === 0x80) { + lenB &= 0x7f; - function timeoutCallback() { - var name = asyncFn.name || 'anonymous'; - var error = new Error('Callback function "' + name + '" timed out.'); - error.code = 'ETIMEDOUT'; - if (info) { - error.info = info; - } - timedOut = true; - callback(error); - } + if (lenB === 0) + throw newInvalidAsn1Error('Indefinite length not supported'); - args.push((...cbArgs) => { - if (!timedOut) { - callback(...cbArgs); - clearTimeout(timer); - } - }); + if (lenB > 4) + throw newInvalidAsn1Error('encoding too long'); - // setup timer and call original function - timer = setTimeout(timeoutCallback, milliseconds); - fn(...args); - }); - } + if (this._size - offset < lenB) + return null; - function range(size) { - var result = Array(size); - while (size--) { - result[size] = size; - } - return result; - } + this._len = 0; + for (var i = 0; i < lenB; i++) + this._len = (this._len << 8) + (this._buf[offset++] & 0xff); - /** - * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a - * time. - * - * @name timesLimit - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} count - The number of times to run the function. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see [async.map]{@link module:Collections.map}. - * @returns {Promise} a promise, if no callback is provided - */ - function timesLimit(count, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return mapLimit$1(range(count), limit, _iteratee, callback); - } + } else { + // Wasn't a variable length + this._len = lenB; + } - /** - * Calls the `iteratee` function `n` times, and accumulates results in the same - * manner you would use with [map]{@link module:Collections.map}. - * - * @name times - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.map]{@link module:Collections.map} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. 
- * @returns {Promise} a promise, if no callback is provided - * @example - * - * // Pretend this is some complicated async factory - * var createUser = function(id, callback) { - * callback(null, { - * id: 'user' + id - * }); - * }; - * - * // generate 5 users - * async.times(5, function(n, next) { - * createUser(n, function(err, user) { - * next(err, user); - * }); - * }, function(err, users) { - * // we should now have 5 users - * }); - */ - function times (n, iteratee, callback) { - return timesLimit(n, Infinity, iteratee, callback) - } + return offset; +}; - /** - * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. - * - * @name timesSeries - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. - * @returns {Promise} a promise, if no callback is provided - */ - function timesSeries (n, iteratee, callback) { - return timesLimit(n, 1, iteratee, callback) - } - /** - * A relative of `reduce`. Takes an Object or Array, and iterates over each - * element in parallel, each step potentially mutating an `accumulator` value. - * The type of the accumulator defaults to the type of collection passed in. - * - * @name transform - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {*} [accumulator] - The initial state of the transform. If omitted, - * it will default to an empty Object or Array, depending on the type of `coll` - * @param {AsyncFunction} iteratee - A function applied to each item in the - * collection that potentially modifies the accumulator. - * Invoked with (accumulator, item, key, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the transformed accumulator. - * Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - * @example - * - * async.transform([1,2,3], function(acc, item, index, callback) { - * // pointless async: - * process.nextTick(function() { - * acc[index] = item * 2 - * callback(null) - * }); - * }, function(err, result) { - * // result is now equal to [2, 4, 6] - * }); - * - * @example - * - * async.transform({a: 1, b: 2, c: 3}, function (obj, val, key, callback) { - * setImmediate(function () { - * obj[key] = val * 2; - * callback(); - * }) - * }, function (err, result) { - * // result is equal to {a: 2, b: 4, c: 6} - * }) - */ - function transform (coll, accumulator, iteratee, callback) { - if (arguments.length <= 3 && typeof accumulator === 'function') { - callback = iteratee; - iteratee = accumulator; - accumulator = Array.isArray(coll) ? [] : {}; - } - callback = once(callback || promiseCallback()); - var _iteratee = wrapAsync(iteratee); +/** + * Parses the next sequence in this BER buffer. + * + * To get the length of the sequence, call `Reader.length`. + * + * @return {Number} the sequence's tag. 
+ */ +Reader.prototype.readSequence = function (tag) { + var seq = this.peek(); + if (seq === null) + return null; + if (tag !== undefined && tag !== seq) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + seq.toString(16)); - eachOf$1(coll, (v, k, cb) => { - _iteratee(accumulator, v, k, cb); - }, err => callback(err, accumulator)); - return callback[PROMISE_SYMBOL] - } + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; - /** - * It runs each task in series but stops whenever any of the functions were - * successful. If one of the tasks were successful, the `callback` will be - * passed the result of the successful task. If all tasks fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name tryEach - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to - * run, each function is passed a `callback(err, result)` it must call on - * completion with an error `err` (which can be `null`) and an optional `result` - * value. - * @param {Function} [callback] - An optional callback which is called when one - * of the tasks has succeeded, or all have failed. It receives the `err` and - * `result` arguments of the last attempt at completing the `task`. Invoked with - * (err, results). - * @returns {Promise} a promise, if no callback is passed - * @example - * async.tryEach([ - * function getDataFromFirstWebsite(callback) { - * // Try getting the data from the first website - * callback(err, data); - * }, - * function getDataFromSecondWebsite(callback) { - * // First website failed, - * // Try getting the data from the backup website - * callback(err, data); - * } - * ], - * // optional callback - * function(err, results) { - * Now do something with the data. - * }); - * - */ - function tryEach(tasks, callback) { - var error = null; - var result; - return eachSeries$1(tasks, (task, taskCb) => { - wrapAsync(task)((err, ...args) => { - if (err === false) return taskCb(err); + this._offset = o; + return seq; +}; - if (args.length < 2) { - [result] = args; - } else { - result = args; - } - error = err; - taskCb(err ? null : {}); - }); - }, () => callback(error, result)); - } - var tryEach$1 = awaitify(tryEach); +Reader.prototype.readInt = function () { + return this._readTag(ASN1.Integer); +}; - /** - * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, - * unmemoized form. Handy for testing. - * - * @name unmemoize - * @static - * @memberOf module:Utils - * @method - * @see [async.memoize]{@link module:Utils.memoize} - * @category Util - * @param {AsyncFunction} fn - the memoized function - * @returns {AsyncFunction} a function that calls the original unmemoized function - */ - function unmemoize(fn) { - return (...args) => { - return (fn.unmemoized || fn)(...args); - }; - } - /** - * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. - * - * @name whilst - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} test - asynchronous truth test to perform before each - * execution of `iteratee`. Invoked with (). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` passes. Invoked with (callback). 
- * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if no callback is passed - * @example - * - * var count = 0; - * async.whilst( - * function test(cb) { cb(null, count < 5); }, - * function iter(callback) { - * count++; - * setTimeout(function() { - * callback(null, count); - * }, 1000); - * }, - * function (err, n) { - * // 5 seconds have passed, n = 5 - * } - * ); - */ - function whilst(test, iteratee, callback) { - callback = onlyOnce(callback); - var _fn = wrapAsync(iteratee); - var _test = wrapAsync(test); - var results = []; +Reader.prototype.readBoolean = function () { + return (this._readTag(ASN1.Boolean) === 0 ? false : true); +}; - function next(err, ...rest) { - if (err) return callback(err); - results = rest; - if (err === false) return; - _test(check); - } - function check(err, truth) { - if (err) return callback(err); - if (err === false) return; - if (!truth) return callback(null, ...results); - _fn(next); - } +Reader.prototype.readEnumeration = function () { + return this._readTag(ASN1.Enumeration); +}; - return _test(check); - } - var whilst$1 = awaitify(whilst, 3); - /** - * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. `callback` will be passed an error and any - * arguments passed to the final `iteratee`'s callback. - * - * The inverse of [whilst]{@link module:ControlFlow.whilst}. - * - * @name until - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {AsyncFunction} test - asynchronous truth test to perform before each - * execution of `iteratee`. Invoked with (callback). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` fails. Invoked with (callback). - * @param {Function} [callback] - A callback which is called after the test - * function has passed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if a callback is not passed - * - * @example - * const results = [] - * let finished = false - * async.until(function test(page, cb) { - * cb(null, finished) - * }, function iter(next) { - * fetchPage(url, (err, body) => { - * if (err) return next(err) - * results = results.concat(body.objects) - * finished = !!body.next - * next(err) - * }) - * }, function done (err) { - * // all pages have been fetched - * }) - */ - function until(test, iteratee, callback) { - const _test = wrapAsync(test); - return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); - } +Reader.prototype.readString = function (tag, retbuf) { + if (!tag) + tag = ASN1.OctetString; - /** - * Runs the `tasks` array of functions in series, each passing their results to - * the next in the array. However, if any of the `tasks` pass an error to their - * own callback, the next function is not executed, and the main `callback` is - * immediately called with the error. 
- * - * @name waterfall - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} - * to run. - * Each function should complete with any number of `result` values. - * The `result` values will be passed as arguments, in order, to the next task. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This will be passed the results of the last task's - * callback. Invoked with (err, [results]). - * @returns undefined - * @example - * - * async.waterfall([ - * function(callback) { - * callback(null, 'one', 'two'); - * }, - * function(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * }, - * function(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - * ], function (err, result) { - * // result now equals 'done' - * }); - * - * // Or, with named functions: - * async.waterfall([ - * myFirstFunction, - * mySecondFunction, - * myLastFunction, - * ], function (err, result) { - * // result now equals 'done' - * }); - * function myFirstFunction(callback) { - * callback(null, 'one', 'two'); - * } - * function mySecondFunction(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * } - * function myLastFunction(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - */ - function waterfall (tasks, callback) { - callback = once(callback); - if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); - if (!tasks.length) return callback(); - var taskIndex = 0; + var b = this.peek(); + if (b === null) + return null; - function nextTask(args) { - var task = wrapAsync(tasks[taskIndex++]); - task(...args, onlyOnce(next)); - } + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); - function next(err, ...args) { - if (err === false) return - if (err || taskIndex === tasks.length) { - return callback(err, ...args); - } - nextTask(args); - } + var o = this.readLength(this._offset + 1); // stored in `length` - nextTask([]); - } + if (o === null) + return null; - var waterfall$1 = awaitify(waterfall); + if (this.length > this._size - o) + return null; - /** - * An "async function" in the context of Async is an asynchronous function with - * a variable number of parameters, with the final parameter being a callback. - * (`function (arg1, arg2, ..., callback) {}`) - * The final callback is of the form `callback(err, results...)`, which must be - * called once the function is completed. The callback should be called with a - * Error as its first argument to signal that an error occurred. - * Otherwise, if no error occurred, it should be called with `null` as the first - * argument, and any additional `result` arguments that may apply, to signal - * successful completion. - * The callback must be called exactly once, ideally on a later tick of the - * JavaScript event loop. - * - * This type of function is also referred to as a "Node-style async function", - * or a "continuation passing-style function" (CPS). Most of the methods of this - * library are themselves CPS/Node-style async functions, or functions that - * return CPS/Node-style async functions. 
- * - * Wherever we accept a Node-style async function, we also directly accept an - * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. - * In this case, the `async` function will not be passed a final callback - * argument, and any thrown error will be used as the `err` argument of the - * implicit callback, and the return value will be used as the `result` value. - * (i.e. a `rejected` of the returned Promise becomes the `err` callback - * argument, and a `resolved` value becomes the `result`.) - * - * Note, due to JavaScript limitations, we can only detect native `async` - * functions and not transpilied implementations. - * Your environment must have `async`/`await` support for this to work. - * (e.g. Node > v7.6, or a recent version of a modern browser). - * If you are using `async` functions through a transpiler (e.g. Babel), you - * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, - * because the `async function` will be compiled to an ordinary function that - * returns a promise. - * - * @typedef {Function} AsyncFunction - * @static - */ + this._offset = o; - var index = { - apply, - applyEach: applyEach$1, - applyEachSeries, - asyncify, - auto, - autoInject, - cargo, - cargoQueue: cargo$1, - compose, - concat: concat$1, - concatLimit: concatLimit$1, - concatSeries: concatSeries$1, - constant, - detect: detect$1, - detectLimit: detectLimit$1, - detectSeries: detectSeries$1, - dir, - doUntil, - doWhilst: doWhilst$1, - each, - eachLimit: eachLimit$2, - eachOf: eachOf$1, - eachOfLimit: eachOfLimit$2, - eachOfSeries: eachOfSeries$1, - eachSeries: eachSeries$1, - ensureAsync, - every: every$1, - everyLimit: everyLimit$1, - everySeries: everySeries$1, - filter: filter$1, - filterLimit: filterLimit$1, - filterSeries: filterSeries$1, - forever: forever$1, - groupBy, - groupByLimit: groupByLimit$1, - groupBySeries, - log, - map: map$1, - mapLimit: mapLimit$1, - mapSeries: mapSeries$1, - mapValues, - mapValuesLimit: mapValuesLimit$1, - mapValuesSeries, - memoize, - nextTick, - parallel: parallel$1, - parallelLimit, - priorityQueue, - queue: queue$1, - race: race$1, - reduce: reduce$1, - reduceRight, - reflect, - reflectAll, - reject: reject$2, - rejectLimit: rejectLimit$1, - rejectSeries: rejectSeries$1, - retry, - retryable, - seq, - series, - setImmediate: setImmediate$1, - some: some$1, - someLimit: someLimit$1, - someSeries: someSeries$1, - sortBy: sortBy$1, - timeout, - times, - timesLimit, - timesSeries, - transform, - tryEach: tryEach$1, - unmemoize, - until, - waterfall: waterfall$1, - whilst: whilst$1, + if (this.length === 0) + return retbuf ? 
Buffer.alloc(0) : ''; - // aliases - all: every$1, - allLimit: everyLimit$1, - allSeries: everySeries$1, - any: some$1, - anyLimit: someLimit$1, - anySeries: someSeries$1, - find: detect$1, - findLimit: detectLimit$1, - findSeries: detectSeries$1, - flatMap: concat$1, - flatMapLimit: concatLimit$1, - flatMapSeries: concatSeries$1, - forEach: each, - forEachSeries: eachSeries$1, - forEachLimit: eachLimit$2, - forEachOf: eachOf$1, - forEachOfSeries: eachOfSeries$1, - forEachOfLimit: eachOfLimit$2, - inject: reduce$1, - foldl: reduce$1, - foldr: reduceRight, - select: filter$1, - selectLimit: filterLimit$1, - selectSeries: filterSeries$1, - wrapSync: asyncify, - during: whilst$1, - doDuring: doWhilst$1 - }; + var str = this._buf.slice(this._offset, this._offset + this.length); + this._offset += this.length; - exports.default = index; - exports.apply = apply; - exports.applyEach = applyEach$1; - exports.applyEachSeries = applyEachSeries; - exports.asyncify = asyncify; - exports.auto = auto; - exports.autoInject = autoInject; - exports.cargo = cargo; - exports.cargoQueue = cargo$1; - exports.compose = compose; - exports.concat = concat$1; - exports.concatLimit = concatLimit$1; - exports.concatSeries = concatSeries$1; - exports.constant = constant; - exports.detect = detect$1; - exports.detectLimit = detectLimit$1; - exports.detectSeries = detectSeries$1; - exports.dir = dir; - exports.doUntil = doUntil; - exports.doWhilst = doWhilst$1; - exports.each = each; - exports.eachLimit = eachLimit$2; - exports.eachOf = eachOf$1; - exports.eachOfLimit = eachOfLimit$2; - exports.eachOfSeries = eachOfSeries$1; - exports.eachSeries = eachSeries$1; - exports.ensureAsync = ensureAsync; - exports.every = every$1; - exports.everyLimit = everyLimit$1; - exports.everySeries = everySeries$1; - exports.filter = filter$1; - exports.filterLimit = filterLimit$1; - exports.filterSeries = filterSeries$1; - exports.forever = forever$1; - exports.groupBy = groupBy; - exports.groupByLimit = groupByLimit$1; - exports.groupBySeries = groupBySeries; - exports.log = log; - exports.map = map$1; - exports.mapLimit = mapLimit$1; - exports.mapSeries = mapSeries$1; - exports.mapValues = mapValues; - exports.mapValuesLimit = mapValuesLimit$1; - exports.mapValuesSeries = mapValuesSeries; - exports.memoize = memoize; - exports.nextTick = nextTick; - exports.parallel = parallel$1; - exports.parallelLimit = parallelLimit; - exports.priorityQueue = priorityQueue; - exports.queue = queue$1; - exports.race = race$1; - exports.reduce = reduce$1; - exports.reduceRight = reduceRight; - exports.reflect = reflect; - exports.reflectAll = reflectAll; - exports.reject = reject$2; - exports.rejectLimit = rejectLimit$1; - exports.rejectSeries = rejectSeries$1; - exports.retry = retry; - exports.retryable = retryable; - exports.seq = seq; - exports.series = series; - exports.setImmediate = setImmediate$1; - exports.some = some$1; - exports.someLimit = someLimit$1; - exports.someSeries = someSeries$1; - exports.sortBy = sortBy$1; - exports.timeout = timeout; - exports.times = times; - exports.timesLimit = timesLimit; - exports.timesSeries = timesSeries; - exports.transform = transform; - exports.tryEach = tryEach$1; - exports.unmemoize = unmemoize; - exports.until = until; - exports.waterfall = waterfall$1; - exports.whilst = whilst$1; - exports.all = every$1; - exports.allLimit = everyLimit$1; - exports.allSeries = everySeries$1; - exports.any = some$1; - exports.anyLimit = someLimit$1; - exports.anySeries = someSeries$1; - exports.find = detect$1; 
- exports.findLimit = detectLimit$1; - exports.findSeries = detectSeries$1; - exports.flatMap = concat$1; - exports.flatMapLimit = concatLimit$1; - exports.flatMapSeries = concatSeries$1; - exports.forEach = each; - exports.forEachSeries = eachSeries$1; - exports.forEachLimit = eachLimit$2; - exports.forEachOf = eachOf$1; - exports.forEachOfSeries = eachOfSeries$1; - exports.forEachOfLimit = eachOfLimit$2; - exports.inject = reduce$1; - exports.foldl = reduce$1; - exports.foldr = reduceRight; - exports.select = filter$1; - exports.selectLimit = filterLimit$1; - exports.selectSeries = filterSeries$1; - exports.wrapSync = asyncify; - exports.during = whilst$1; - exports.doDuring = doWhilst$1; + return retbuf ? str : str.toString('utf8'); +}; - Object.defineProperty(exports, '__esModule', { value: true }); +Reader.prototype.readOID = function (tag) { + if (!tag) + tag = ASN1.OID; -}))); + var b = this.readString(tag, true); + if (b === null) + return null; + var values = []; + var value = 0; -/***/ }), + for (var i = 0; i < b.length; i++) { + var byte = b[i] & 0xff; -/***/ 14812: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + value <<= 7; + value += byte & 0x7f; + if ((byte & 0x80) === 0) { + values.push(value); + value = 0; + } + } -module.exports = -{ - parallel : __nccwpck_require__(8210), - serial : __nccwpck_require__(50445), - serialOrdered : __nccwpck_require__(3578) + value = values.shift(); + values.unshift(value % 40); + values.unshift((value / 40) >> 0); + + return values.join('.'); }; -/***/ }), +Reader.prototype._readTag = function (tag) { + assert.ok(tag !== undefined); -/***/ 1700: -/***/ ((module) => { + var b = this.peek(); -// API -module.exports = abort; + if (b === null) + return null; -/** - * Aborts leftover active jobs - * - * @param {object} state - current state object - */ -function abort(state) -{ - Object.keys(state.jobs).forEach(clean.bind(state)); + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); - // reset leftover jobs - state.jobs = {}; -} + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; -/** - * Cleans up leftover job by invoking abort function for the provided job id - * - * @this state - * @param {string|number} key - job id to abort - */ -function clean(key) -{ - if (typeof this.jobs[key] == 'function') - { - this.jobs[key](); - } -} + if (this.length > 4) + throw newInvalidAsn1Error('Integer too long: ' + this.length); + if (this.length > this._size - o) + return null; + this._offset = o; -/***/ }), + var fb = this._buf[this._offset]; + var value = 0; -/***/ 72794: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + for (var i = 0; i < this.length; i++) { + value <<= 8; + value |= (this._buf[this._offset++] & 0xff); + } -var defer = __nccwpck_require__(15295); + if ((fb & 0x80) === 0x80 && i !== 4) + value -= (1 << (i * 8)); -// API -module.exports = async; + return value >> 0; +}; -/** - * Runs provided callback asynchronously - * even if callback itself is not - * - * @param {function} callback - callback to invoke - * @returns {function} - augmented callback - */ -function async(callback) -{ - var isAsync = false; - // check if async happened - defer(function() { isAsync = true; }); - return function async_callback(err, result) - { - if (isAsync) - { - callback(err, result); - } - else - { - defer(function nextTick_callback() - { - callback(err, result); - }); - } - }; -} +// --- Exported API + 
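+// NOTE (editorial sketch, not part of the bundled asn1 source): a minimal
+// round-trip through the BerReader API documented above and the BerWriter
+// defined further below (module 43200). The Reader constructor, which takes a
+// Buffer, sits earlier in this module outside the hunk shown here, so its
+// exact signature is assumed.
+//
+//   var writer = new Writer();          // BerWriter, defined below
+//   writer.startSequence();             // defaults to Sequence | Constructor (0x30)
+//   writer.writeString('hello');        // defaults to OctetString (tag 4)
+//   writer.writeInt(42);                // defaults to Integer (tag 2)
+//   writer.endSequence();
+//
+//   var reader = new Reader(writer.buffer);
+//   reader.readSequence();              // returns the sequence tag (0x30)
+//   reader.readString();                // 'hello'
+//   reader.readInt();                   // 42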
+module.exports = Reader; /***/ }), -/***/ 15295: +/***/ 42473: /***/ ((module) => { -module.exports = defer; +// Copyright 2011 Mark Cavage All rights reserved. -/** - * Runs provided function on next iteration of the event loop - * - * @param {function} fn - function to run - */ -function defer(fn) -{ - var nextTick = typeof setImmediate == 'function' - ? setImmediate - : ( - typeof process == 'object' && typeof process.nextTick == 'function' - ? process.nextTick - : null - ); - if (nextTick) - { - nextTick(fn); - } - else - { - setTimeout(fn, 0); - } -} +module.exports = { + EOC: 0, + Boolean: 1, + Integer: 2, + BitString: 3, + OctetString: 4, + Null: 5, + OID: 6, + ObjectDescriptor: 7, + External: 8, + Real: 9, // float + Enumeration: 10, + PDV: 11, + Utf8String: 12, + RelativeOID: 13, + Sequence: 16, + Set: 17, + NumericString: 18, + PrintableString: 19, + T61String: 20, + VideotexString: 21, + IA5String: 22, + UTCTime: 23, + GeneralizedTime: 24, + GraphicString: 25, + VisibleString: 26, + GeneralString: 28, + UniversalString: 29, + CharacterString: 30, + BMPString: 31, + Constructor: 32, + Context: 128 +}; /***/ }), -/***/ 9023: +/***/ 43200: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var async = __nccwpck_require__(72794) - , abort = __nccwpck_require__(1700) - ; - -// API -module.exports = iterate; - -/** - * Iterates over each job object - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {object} state - current job status - * @param {function} callback - invoked when all elements processed - */ -function iterate(list, iterator, state, callback) -{ - // store current index - var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; - - state.jobs[key] = runJob(iterator, key, list[key], function(error, output) - { - // don't repeat yourself - // skip secondary callbacks - if (!(key in state.jobs)) - { - return; - } - - // clean up jobs - delete state.jobs[key]; - - if (error) - { - // don't process rest of the results - // stop still active jobs - // and reset the list - abort(state); - } - else - { - state.results[key] = output; - } +// Copyright 2011 Mark Cavage All rights reserved. 
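+// NOTE (editorial sketch, not part of the bundled asn1 source): the tag
+// constants in the module just above (required below as `ASN1`) are combined
+// into BER identifier octets by OR-ing in the Constructor (0x20) and/or
+// Context (0x80) bits. For example, a context-specific constructed tag [0]:
+//
+//   var tag = ASN1.Context | ASN1.Constructor | 0;   // 0x80 | 0x20 | 0 === 0xa0
+//   writer.startSequence(tag);                       // read back later with reader.readSequence(0xa0)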
- // return salvaged results - callback(error, state.results); - }); -} +var assert = __nccwpck_require__(42357); +var Buffer = __nccwpck_require__(15118).Buffer; +var ASN1 = __nccwpck_require__(42473); +var errors = __nccwpck_require__(99348); -/** - * Runs iterator over provided job element - * - * @param {function} iterator - iterator to invoke - * @param {string|number} key - key/index of the element in the list of jobs - * @param {mixed} item - job description - * @param {function} callback - invoked after iterator is done with the job - * @returns {function|mixed} - job abort function or something else - */ -function runJob(iterator, key, item, callback) -{ - var aborter; - // allow shortcut if iterator expects only two arguments - if (iterator.length == 2) - { - aborter = iterator(item, async(callback)); - } - // otherwise go with full three arguments - else - { - aborter = iterator(item, key, async(callback)); - } +// --- Globals - return aborter; -} +var newInvalidAsn1Error = errors.newInvalidAsn1Error; +var DEFAULT_OPTS = { + size: 1024, + growthFactor: 8 +}; -/***/ }), -/***/ 42474: -/***/ ((module) => { +// --- Helpers -// API -module.exports = state; +function merge(from, to) { + assert.ok(from); + assert.equal(typeof (from), 'object'); + assert.ok(to); + assert.equal(typeof (to), 'object'); -/** - * Creates initial state object - * for iteration over list - * - * @param {array|object} list - list to iterate over - * @param {function|null} sortMethod - function to use for keys sort, - * or `null` to keep them as is - * @returns {object} - initial state object - */ -function state(list, sortMethod) -{ - var isNamedList = !Array.isArray(list) - , initState = - { - index : 0, - keyedList: isNamedList || sortMethod ? Object.keys(list) : null, - jobs : {}, - results : isNamedList ? {} : [], - size : isNamedList ? Object.keys(list).length : list.length - } - ; + var keys = Object.getOwnPropertyNames(from); + keys.forEach(function (key) { + if (to[key]) + return; - if (sortMethod) - { - // sort array keys based on it's values - // sort object's keys just on own merit - initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) - { - return sortMethod(list[a], list[b]); - }); - } + var value = Object.getOwnPropertyDescriptor(from, key); + Object.defineProperty(to, key, value); + }); - return initState; + return to; } -/***/ }), - -/***/ 37942: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var abort = __nccwpck_require__(1700) - , async = __nccwpck_require__(72794) - ; - -// API -module.exports = terminator; -/** - * Terminates jobs in the attached state context - * - * @this AsyncKitState# - * @param {function} callback - final callback to invoke after termination - */ -function terminator(callback) -{ - if (!Object.keys(this.jobs).length) - { - return; - } +// --- API - // fast forward iteration index - this.index = this.size; +function Writer(options) { + options = merge(DEFAULT_OPTS, options || {}); - // abort jobs - abort(this); + this._buf = Buffer.alloc(options.size || 1024); + this._size = this._buf.length; + this._offset = 0; + this._options = options; - // send back results we have so far - async(callback)(null, this.results); + // A list of offsets in the buffer where we need to insert + // sequence tag/len pairs. 
+ this._seq = []; } +Object.defineProperty(Writer.prototype, 'buffer', { + get: function () { + if (this._seq.length) + throw newInvalidAsn1Error(this._seq.length + ' unended sequence(s)'); -/***/ }), - -/***/ 8210: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return (this._buf.slice(0, this._offset)); + } +}); -var iterate = __nccwpck_require__(9023) - , initState = __nccwpck_require__(42474) - , terminator = __nccwpck_require__(37942) - ; +Writer.prototype.writeByte = function (b) { + if (typeof (b) !== 'number') + throw new TypeError('argument must be a Number'); -// Public API -module.exports = parallel; + this._ensure(1); + this._buf[this._offset++] = b; +}; -/** - * Runs iterator over provided array elements in parallel - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function parallel(list, iterator, callback) -{ - var state = initState(list); - while (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, function(error, result) - { - if (error) - { - callback(error, result); - return; - } +Writer.prototype.writeInt = function (i, tag) { + if (typeof (i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof (tag) !== 'number') + tag = ASN1.Integer; - // looks like it's the last one - if (Object.keys(state.jobs).length === 0) - { - callback(null, state.results); - return; - } - }); + var sz = 4; - state.index++; + while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) && + (sz > 1)) { + sz--; + i <<= 8; } - return terminator.bind(state, callback); -} - - -/***/ }), - -/***/ 50445: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var serialOrdered = __nccwpck_require__(3578); + if (sz > 4) + throw newInvalidAsn1Error('BER ints cannot be > 0xffffffff'); -// Public API -module.exports = serial; + this._ensure(2 + sz); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = sz; -/** - * Runs iterator over provided array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serial(list, iterator, callback) -{ - return serialOrdered(list, iterator, null, callback); -} + while (sz-- > 0) { + this._buf[this._offset++] = ((i & 0xff000000) >>> 24); + i <<= 8; + } +}; -/***/ }), -/***/ 3578: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Writer.prototype.writeNull = function () { + this.writeByte(ASN1.Null); + this.writeByte(0x00); +}; -var iterate = __nccwpck_require__(9023) - , initState = __nccwpck_require__(42474) - , terminator = __nccwpck_require__(37942) - ; -// Public API -module.exports = serialOrdered; -// sorting helpers -module.exports.ascending = ascending; -module.exports.descending = descending; +Writer.prototype.writeEnumeration = function (i, tag) { + if (typeof (i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof (tag) !== 'number') + tag = ASN1.Enumeration; -/** - * Runs iterator over provided sorted array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} 
sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serialOrdered(list, iterator, sortMethod, callback) -{ - var state = initState(list, sortMethod); + return this.writeInt(i, tag); +}; - iterate(list, iterator, state, function iteratorHandler(error, result) - { - if (error) - { - callback(error, result); - return; - } - state.index++; +Writer.prototype.writeBoolean = function (b, tag) { + if (typeof (b) !== 'boolean') + throw new TypeError('argument must be a Boolean'); + if (typeof (tag) !== 'number') + tag = ASN1.Boolean; - // are we there yet? - if (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, iteratorHandler); - return; - } + this._ensure(3); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = 0x01; + this._buf[this._offset++] = b ? 0xff : 0x00; +}; - // done here - callback(null, state.results); - }); - return terminator.bind(state, callback); -} +Writer.prototype.writeString = function (s, tag) { + if (typeof (s) !== 'string') + throw new TypeError('argument must be a string (was: ' + typeof (s) + ')'); + if (typeof (tag) !== 'number') + tag = ASN1.OctetString; -/* - * -- Sort methods - */ + var len = Buffer.byteLength(s); + this.writeByte(tag); + this.writeLength(len); + if (len) { + this._ensure(len); + this._buf.write(s, this._offset); + this._offset += len; + } +}; -/** - * sort helper to sort array elements in ascending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function ascending(a, b) -{ - return a < b ? -1 : a > b ? 1 : 0; -} -/** - * sort helper to sort array elements in descending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function descending(a, b) -{ - return -1 * ascending(a, b); -} +Writer.prototype.writeBuffer = function (buf, tag) { + if (typeof (tag) !== 'number') + throw new TypeError('tag must be a number'); + if (!Buffer.isBuffer(buf)) + throw new TypeError('argument must be a buffer'); + this.writeByte(tag); + this.writeLength(buf.length); + this._ensure(buf.length); + buf.copy(this._buf, this._offset, 0, buf.length); + this._offset += buf.length; +}; -/***/ }), -/***/ 96342: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Writer.prototype.writeStringArray = function (strings) { + if ((!strings instanceof Array)) + throw new TypeError('argument must be an Array[String]'); + var self = this; + strings.forEach(function (s) { + self.writeString(s); + }); +}; -/*! - * Copyright 2010 LearnBoost - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ +// This is really to solve DER cases, but whatever for now +Writer.prototype.writeOID = function (s, tag) { + if (typeof (s) !== 'string') + throw new TypeError('argument must be a string'); + if (typeof (tag) !== 'number') + tag = ASN1.OID; -/** - * Module dependencies. - */ + if (!/^([0-9]+\.){3,}[0-9]+$/.test(s)) + throw new Error('argument is not a valid OID string'); -var crypto = __nccwpck_require__(76417) - , parse = __nccwpck_require__(78835).parse - ; + function encodeOctet(bytes, octet) { + if (octet < 128) { + bytes.push(octet); + } else if (octet < 16384) { + bytes.push((octet >>> 7) | 0x80); + bytes.push(octet & 0x7F); + } else if (octet < 2097152) { + bytes.push((octet >>> 14) | 0x80); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else if (octet < 268435456) { + bytes.push((octet >>> 21) | 0x80); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else { + bytes.push(((octet >>> 28) | 0x80) & 0xFF); + bytes.push(((octet >>> 21) | 0x80) & 0xFF); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } + } -/** - * Valid keys. - */ + var tmp = s.split('.'); + var bytes = []; + bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10)); + tmp.slice(2).forEach(function (b) { + encodeOctet(bytes, parseInt(b, 10)); + }); -var keys = - [ 'acl' - , 'location' - , 'logging' - , 'notification' - , 'partNumber' - , 'policy' - , 'requestPayment' - , 'torrent' - , 'uploadId' - , 'uploads' - , 'versionId' - , 'versioning' - , 'versions' - , 'website' - ] + var self = this; + this._ensure(2 + bytes.length); + this.writeByte(tag); + this.writeLength(bytes.length); + bytes.forEach(function (b) { + self.writeByte(b); + }); +}; -/** - * Return an "Authorization" header value with the given `options` - * in the form of "AWS :" - * - * @param {Object} options - * @return {String} - * @api private - */ -function authorization (options) { - return 'AWS ' + options.key + ':' + sign(options) -} +Writer.prototype.writeLength = function (len) { + if (typeof (len) !== 'number') + throw new TypeError('argument must be a Number'); -module.exports = authorization -module.exports.authorization = authorization + this._ensure(4); -/** - * Simple HMAC-SHA1 Wrapper - * - * @param {Object} options - * @return {String} - * @api private - */ + if (len <= 0x7f) { + this._buf[this._offset++] = len; + } else if (len <= 0xff) { + this._buf[this._offset++] = 0x81; + this._buf[this._offset++] = len; + } else if (len <= 0xffff) { + this._buf[this._offset++] = 0x82; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else if (len <= 0xffffff) { + this._buf[this._offset++] = 0x83; + this._buf[this._offset++] = len >> 16; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else { + throw newInvalidAsn1Error('Length too long (> 4 bytes)'); + } +}; -function hmacSha1 (options) { - return crypto.createHmac('sha1', options.secret).update(options.message).digest('base64') -} +Writer.prototype.startSequence = function (tag) { + if (typeof (tag) !== 'number') + tag = ASN1.Sequence | ASN1.Constructor; -module.exports.hmacSha1 = hmacSha1 + this.writeByte(tag); + this._seq.push(this._offset); + this._ensure(3); + this._offset += 3; +}; -/** - * Create a base64 sha1 HMAC for `options`. 
- * - * @param {Object} options - * @return {String} - * @api private - */ -function sign (options) { - options.message = stringToSign(options) - return hmacSha1(options) -} -module.exports.sign = sign +Writer.prototype.endSequence = function () { + var seq = this._seq.pop(); + var start = seq + 3; + var len = this._offset - start; -/** - * Create a base64 sha1 HMAC for `options`. - * - * Specifically to be used with S3 presigned URLs - * - * @param {Object} options - * @return {String} - * @api private - */ + if (len <= 0x7f) { + this._shift(start, len, -2); + this._buf[seq] = len; + } else if (len <= 0xff) { + this._shift(start, len, -1); + this._buf[seq] = 0x81; + this._buf[seq + 1] = len; + } else if (len <= 0xffff) { + this._buf[seq] = 0x82; + this._buf[seq + 1] = len >> 8; + this._buf[seq + 2] = len; + } else if (len <= 0xffffff) { + this._shift(start, len, 1); + this._buf[seq] = 0x83; + this._buf[seq + 1] = len >> 16; + this._buf[seq + 2] = len >> 8; + this._buf[seq + 3] = len; + } else { + throw newInvalidAsn1Error('Sequence too long'); + } +}; -function signQuery (options) { - options.message = queryStringToSign(options) - return hmacSha1(options) -} -module.exports.signQuery= signQuery -/** - * Return a string for sign() with the given `options`. - * - * Spec: - * - * \n - * \n - * \n - * \n - * [headers\n] - * - * - * @param {Object} options - * @return {String} - * @api private - */ +Writer.prototype._shift = function (start, len, shift) { + assert.ok(start !== undefined); + assert.ok(len !== undefined); + assert.ok(shift); -function stringToSign (options) { - var headers = options.amazonHeaders || '' - if (headers) headers += '\n' - var r = - [ options.verb - , options.md5 - , options.contentType - , options.date ? options.date.toUTCString() : '' - , headers + options.resource - ] - return r.join('\n') -} -module.exports.stringToSign = stringToSign + this._buf.copy(this._buf, start + shift, start, start + len); + this._offset += shift; +}; -/** - * Return a string for sign() with the given `options`, but is meant exclusively - * for S3 presigned URLs - * - * Spec: - * - * \n - * - * - * @param {Object} options - * @return {String} - * @api private - */ +Writer.prototype._ensure = function (len) { + assert.ok(len); -function queryStringToSign (options){ - return 'GET\n\n\n' + options.date + '\n' + options.resource -} -module.exports.queryStringToSign = queryStringToSign + if (this._size - this._offset < len) { + var sz = this._size * this._options.growthFactor; + if (sz - this._offset < len) + sz += len; -/** - * Perform the following: - * - * - ignore non-amazon headers - * - lowercase fields - * - sort lexicographically - * - trim whitespace between ":" - * - join with newline - * - * @param {Object} headers - * @return {String} - * @api private - */ + var buf = Buffer.alloc(sz); -function canonicalizeHeaders (headers) { - var buf = [] - , fields = Object.keys(headers) - ; - for (var i = 0, len = fields.length; i < len; ++i) { - var field = fields[i] - , val = headers[field] - , field = field.toLowerCase() - ; - if (0 !== field.indexOf('x-amz')) continue - buf.push(field + ':' + val) + this._buf.copy(buf, 0, 0, this._offset); + this._buf = buf; + this._size = sz; } - return buf.sort().join('\n') -} -module.exports.canonicalizeHeaders = canonicalizeHeaders +}; -/** - * Perform the following: - * - * - ignore non sub-resources - * - sort lexicographically - * - * @param {String} resource - * @return {String} - * @api private - */ -function canonicalizeResource (resource) { - 
var url = parse(resource, true) - , path = url.pathname - , buf = [] - ; - Object.keys(url.query).forEach(function(key){ - if (!~keys.indexOf(key)) return - var val = '' == url.query[key] ? '' : '=' + encodeURIComponent(url.query[key]) - buf.push(key + val) - }) +// --- Exported API - return path + (buf.length ? '?' + buf.sort().join('&') : '') -} -module.exports.canonicalizeResource = canonicalizeResource +module.exports = Writer; /***/ }), -/***/ 16071: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -var aws4 = exports, - url = __nccwpck_require__(78835), - querystring = __nccwpck_require__(71191), - crypto = __nccwpck_require__(76417), - lru = __nccwpck_require__(74225), - credentialsCache = lru(1000) +/***/ 80970: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html +// Copyright 2011 Mark Cavage All rights reserved. -function hmac(key, string, encoding) { - return crypto.createHmac('sha256', key).update(string, 'utf8').digest(encoding) -} +// If you have no idea what ASN.1 or BER is, see this: +// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc -function hash(string, encoding) { - return crypto.createHash('sha256').update(string, 'utf8').digest(encoding) -} +var Ber = __nccwpck_require__(194); -// This function assumes the string has already been percent encoded -function encodeRfc3986(urlEncodedString) { - return urlEncodedString.replace(/[!'()*]/g, function(c) { - return '%' + c.charCodeAt(0).toString(16).toUpperCase() - }) -} -function encodeRfc3986Full(str) { - return encodeRfc3986(encodeURIComponent(str)) -} -// A bit of a combination of: -// https://github.com/aws/aws-sdk-java-v2/blob/dc695de6ab49ad03934e1b02e7263abbd2354be0/core/auth/src/main/java/software/amazon/awssdk/auth/signer/internal/AbstractAws4Signer.java#L59 -// https://github.com/aws/aws-sdk-js/blob/18cb7e5b463b46239f9fdd4a65e2ff8c81831e8f/lib/signers/v4.js#L191-L199 -// https://github.com/mhart/aws4fetch/blob/b3aed16b6f17384cf36ea33bcba3c1e9f3bdfefd/src/main.js#L25-L34 -var HEADERS_TO_IGNORE = { - 'authorization': true, - 'connection': true, - 'x-amzn-trace-id': true, - 'user-agent': true, - 'expect': true, - 'presigned-expires': true, - 'range': true, -} +// --- Exported API -// request: { path | body, [host], [method], [headers], [service], [region] } -// credentials: { accessKeyId, secretAccessKey, [sessionToken] } -function RequestSigner(request, credentials) { +module.exports = { - if (typeof request === 'string') request = url.parse(request) + Ber: Ber, - var headers = request.headers = (request.headers || {}), - hostParts = (!this.service || !this.region) && this.matchHost(request.hostname || request.host || headers.Host || headers.host) + BerReader: Ber.Reader, - this.request = request - this.credentials = credentials || this.defaultCredentials() + BerWriter: Ber.Writer - this.service = request.service || hostParts[0] || '' - this.region = request.region || hostParts[1] || 'us-east-1' +}; - // SES uses a different domain from the service name - if (this.service === 'email') this.service = 'ses' - if (!request.method && request.body) - request.method = 'POST' +/***/ }), - if (!headers.Host && !headers.host) { - headers.Host = request.hostname || request.host || this.createHost() +/***/ 66631: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // If a port is specified explicitly, use it as is - if (request.port) - headers.Host += ':' + request.port - } - if 
(!request.hostname && !request.host) - request.hostname = headers.Host || headers.host +// Copyright (c) 2012, Mark Cavage. All rights reserved. +// Copyright 2015 Joyent, Inc. - this.isCodeCommitGit = this.service === 'codecommit' && request.method === 'GIT' -} +var assert = __nccwpck_require__(42357); +var Stream = __nccwpck_require__(92413).Stream; +var util = __nccwpck_require__(31669); -RequestSigner.prototype.matchHost = function(host) { - var match = (host || '').match(/([^\.]+)\.(?:([^\.]*)\.)?amazonaws\.com(\.cn)?$/) - var hostParts = (match || []).slice(1, 3) - // ES's hostParts are sometimes the other way round, if the value that is expected - // to be region equals ‘es’ switch them back - // e.g. search-cluster-name-aaaa00aaaa0aaa0aaaaaaa0aaa.us-east-1.es.amazonaws.com - if (hostParts[1] === 'es') - hostParts = hostParts.reverse() +///--- Globals - if (hostParts[1] == 's3') { - hostParts[0] = 's3' - hostParts[1] = 'us-east-1' - } else { - for (var i = 0; i < 2; i++) { - if (/^s3-/.test(hostParts[i])) { - hostParts[1] = hostParts[i].slice(3) - hostParts[0] = 's3' - break - } - } - } +/* JSSTYLED */ +var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; - return hostParts -} -// http://docs.aws.amazon.com/general/latest/gr/rande.html -RequestSigner.prototype.isSingleRegion = function() { - // Special case for S3 and SimpleDB in us-east-1 - if (['s3', 'sdb'].indexOf(this.service) >= 0 && this.region === 'us-east-1') return true +///--- Internal - return ['cloudfront', 'ls', 'route53', 'iam', 'importexport', 'sts'] - .indexOf(this.service) >= 0 +function _capitalize(str) { + return (str.charAt(0).toUpperCase() + str.slice(1)); } -RequestSigner.prototype.createHost = function() { - var region = this.isSingleRegion() ? '' : '.' + this.region, - subdomain = this.service === 'ses' ? 'email' : this.service - return subdomain + region + '.amazonaws.com' +function _toss(name, expected, oper, arg, actual) { + throw new assert.AssertionError({ + message: util.format('%s (%s) is required', name, expected), + actual: (actual === undefined) ? typeof (arg) : actual(arg), + expected: expected, + operator: oper || '===', + stackStartFunction: _toss.caller + }); } -RequestSigner.prototype.prepareRequest = function() { - this.parsePath() - - var request = this.request, headers = request.headers, query - - if (request.signQuery) { - - this.parsedPath.query = query = this.parsedPath.query || {} - - if (this.credentials.sessionToken) - query['X-Amz-Security-Token'] = this.credentials.sessionToken - - if (this.service === 's3' && !query['X-Amz-Expires']) - query['X-Amz-Expires'] = 86400 - - if (query['X-Amz-Date']) - this.datetime = query['X-Amz-Date'] - else - query['X-Amz-Date'] = this.getDateTime() - - query['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256' - query['X-Amz-Credential'] = this.credentials.accessKeyId + '/' + this.credentialString() - query['X-Amz-SignedHeaders'] = this.signedHeaders() - - } else { +function _getClass(arg) { + return (Object.prototype.toString.call(arg).slice(8, -1)); +} - if (!request.doNotModifyHeaders && !this.isCodeCommitGit) { - if (request.body && !headers['Content-Type'] && !headers['content-type']) - headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8' +function noop() { + // Why even bother with asserts? 
+} - if (request.body && !headers['Content-Length'] && !headers['content-length']) - headers['Content-Length'] = Buffer.byteLength(request.body) - if (this.credentials.sessionToken && !headers['X-Amz-Security-Token'] && !headers['x-amz-security-token']) - headers['X-Amz-Security-Token'] = this.credentials.sessionToken +///--- Exports - if (this.service === 's3' && !headers['X-Amz-Content-Sha256'] && !headers['x-amz-content-sha256']) - headers['X-Amz-Content-Sha256'] = hash(this.request.body || '', 'hex') - - if (headers['X-Amz-Date'] || headers['x-amz-date']) - this.datetime = headers['X-Amz-Date'] || headers['x-amz-date'] - else - headers['X-Amz-Date'] = this.getDateTime() +var types = { + bool: { + check: function (arg) { return typeof (arg) === 'boolean'; } + }, + func: { + check: function (arg) { return typeof (arg) === 'function'; } + }, + string: { + check: function (arg) { return typeof (arg) === 'string'; } + }, + object: { + check: function (arg) { + return typeof (arg) === 'object' && arg !== null; + } + }, + number: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg); + } + }, + finite: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); + } + }, + buffer: { + check: function (arg) { return Buffer.isBuffer(arg); }, + operator: 'Buffer.isBuffer' + }, + array: { + check: function (arg) { return Array.isArray(arg); }, + operator: 'Array.isArray' + }, + stream: { + check: function (arg) { return arg instanceof Stream; }, + operator: 'instanceof', + actual: _getClass + }, + date: { + check: function (arg) { return arg instanceof Date; }, + operator: 'instanceof', + actual: _getClass + }, + regexp: { + check: function (arg) { return arg instanceof RegExp; }, + operator: 'instanceof', + actual: _getClass + }, + uuid: { + check: function (arg) { + return typeof (arg) === 'string' && UUID_REGEXP.test(arg); + }, + operator: 'isUUID' } +}; - delete headers.Authorization - delete headers.authorization - } -} - -RequestSigner.prototype.sign = function() { - if (!this.parsedPath) this.prepareRequest() - - if (this.request.signQuery) { - this.parsedPath.query['X-Amz-Signature'] = this.signature() - } else { - this.request.headers.Authorization = this.authHeader() - } +function _setExports(ndebug) { + var keys = Object.keys(types); + var out; - this.request.path = this.formatPath() + /* re-export standard assert */ + if (process.env.NODE_NDEBUG) { + out = noop; + } else { + out = function (arg, msg) { + if (!arg) { + _toss(msg, 'true', arg); + } + }; + } - return this.request -} + /* standard checks */ + keys.forEach(function (k) { + if (ndebug) { + out[k] = noop; + return; + } + var type = types[k]; + out[k] = function (arg, msg) { + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); -RequestSigner.prototype.getDateTime = function() { - if (!this.datetime) { - var headers = this.request.headers, - date = new Date(headers.Date || headers.date || new Date) + /* optional checks */ + keys.forEach(function (k) { + var name = 'optional' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); - this.datetime = date.toISOString().replace(/[:\-]|\.\d{3}/g, '') + /* arrayOf checks */ + keys.forEach(function (k) { + var name = 'arrayOf' + _capitalize(k); + if (ndebug) { + out[name] = 
noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); - // Remove the trailing 'Z' on the timestamp string for CodeCommit git access - if (this.isCodeCommitGit) this.datetime = this.datetime.slice(0, -1) - } - return this.datetime -} + /* optionalArrayOf checks */ + keys.forEach(function (k) { + var name = 'optionalArrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); -RequestSigner.prototype.getDate = function() { - return this.getDateTime().substr(0, 8) -} + /* re-export built-in assertions */ + Object.keys(assert).forEach(function (k) { + if (k === 'AssertionError') { + out[k] = assert[k]; + return; + } + if (ndebug) { + out[k] = noop; + return; + } + out[k] = assert[k]; + }); -RequestSigner.prototype.authHeader = function() { - return [ - 'AWS4-HMAC-SHA256 Credential=' + this.credentials.accessKeyId + '/' + this.credentialString(), - 'SignedHeaders=' + this.signedHeaders(), - 'Signature=' + this.signature(), - ].join(', ') -} + /* export ourselves (for unit tests _only_) */ + out._setExports = _setExports; -RequestSigner.prototype.signature = function() { - var date = this.getDate(), - cacheKey = [this.credentials.secretAccessKey, date, this.region, this.service].join(), - kDate, kRegion, kService, kCredentials = credentialsCache.get(cacheKey) - if (!kCredentials) { - kDate = hmac('AWS4' + this.credentials.secretAccessKey, date) - kRegion = hmac(kDate, this.region) - kService = hmac(kRegion, this.service) - kCredentials = hmac(kService, 'aws4_request') - credentialsCache.set(cacheKey, kCredentials) - } - return hmac(kCredentials, this.stringToSign(), 'hex') + return out; } -RequestSigner.prototype.stringToSign = function() { - return [ - 'AWS4-HMAC-SHA256', - this.getDateTime(), - this.credentialString(), - hash(this.canonicalString(), 'hex'), - ].join('\n') -} +module.exports = _setExports(process.env.NODE_NDEBUG); -RequestSigner.prototype.canonicalString = function() { - if (!this.parsedPath) this.prepareRequest() - var pathStr = this.parsedPath.path, - query = this.parsedPath.query, - headers = this.request.headers, - queryStr = '', - normalizePath = this.service !== 's3', - decodePath = this.service === 's3' || this.request.doNotEncodePath, - decodeSlashesInPath = this.service === 's3', - firstValOnly = this.service === 's3', - bodyHash +/***/ }), - if (this.service === 's3' && this.request.signQuery) { - bodyHash = 'UNSIGNED-PAYLOAD' - } else if (this.isCodeCommitGit) { - bodyHash = '' - } else { - bodyHash = headers['X-Amz-Content-Sha256'] || headers['x-amz-content-sha256'] || - hash(this.request.body || '', 'hex') - } +/***/ 57888: +/***/ (function(__unused_webpack_module, exports) { - if (query) { - var reducedQuery = Object.keys(query).reduce(function(obj, key) { - if (!key) return obj - obj[encodeRfc3986Full(key)] = !Array.isArray(query[key]) ? query[key] : - (firstValOnly ? 
query[key][0] : query[key]) - return obj - }, {}) - var encodedQueryPieces = [] - Object.keys(reducedQuery).sort().forEach(function(key) { - if (!Array.isArray(reducedQuery[key])) { - encodedQueryPieces.push(key + '=' + encodeRfc3986Full(reducedQuery[key])) - } else { - reducedQuery[key].map(encodeRfc3986Full).sort() - .forEach(function(val) { encodedQueryPieces.push(key + '=' + val) }) - } - }) - queryStr = encodedQueryPieces.join('&') - } - if (pathStr !== '/') { - if (normalizePath) pathStr = pathStr.replace(/\/{2,}/g, '/') - pathStr = pathStr.split('/').reduce(function(path, piece) { - if (normalizePath && piece === '..') { - path.pop() - } else if (!normalizePath || piece !== '.') { - if (decodePath) piece = decodeURIComponent(piece.replace(/\+/g, ' ')) - path.push(encodeRfc3986Full(piece)) - } - return path - }, []).join('/') - if (pathStr[0] !== '/') pathStr = '/' + pathStr - if (decodeSlashesInPath) pathStr = pathStr.replace(/%2F/g, '/') - } +(function (global, factory) { + true ? factory(exports) : + 0; +}(this, (function (exports) { 'use strict'; - return [ - this.request.method || 'GET', - pathStr, - queryStr, - this.canonicalHeaders() + '\n', - this.signedHeaders(), - bodyHash, - ].join('\n') -} + /** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. + * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ + function apply(fn, ...args) { + return (...callArgs) => fn(...args,...callArgs); + } -RequestSigner.prototype.canonicalHeaders = function() { - var headers = this.request.headers - function trimAll(header) { - return header.toString().trim().replace(/\s+/g, ' ') - } - return Object.keys(headers) - .filter(function(key) { return HEADERS_TO_IGNORE[key.toLowerCase()] == null }) - .sort(function(a, b) { return a.toLowerCase() < b.toLowerCase() ? 
-1 : 1 }) - .map(function(key) { return key.toLowerCase() + ':' + trimAll(headers[key]) }) - .join('\n') -} + function initialParams (fn) { + return function (...args/*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; + } -RequestSigner.prototype.signedHeaders = function() { - return Object.keys(this.request.headers) - .map(function(key) { return key.toLowerCase() }) - .filter(function(key) { return HEADERS_TO_IGNORE[key] == null }) - .sort() - .join(';') -} + /* istanbul ignore file */ -RequestSigner.prototype.credentialString = function() { - return [ - this.getDate(), - this.region, - this.service, - 'aws4_request', - ].join('/') -} + var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; + var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; -RequestSigner.prototype.defaultCredentials = function() { - var env = process.env - return { - accessKeyId: env.AWS_ACCESS_KEY_ID || env.AWS_ACCESS_KEY, - secretAccessKey: env.AWS_SECRET_ACCESS_KEY || env.AWS_SECRET_KEY, - sessionToken: env.AWS_SESSION_TOKEN, - } -} + function fallback(fn) { + setTimeout(fn, 0); + } -RequestSigner.prototype.parsePath = function() { - var path = this.request.path || '/' + function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); + } - // S3 doesn't always encode characters > 127 correctly and - // all services don't encode characters > 255 correctly - // So if there are non-reserved chars (and it's not already all % encoded), just encode them all - if (/[^0-9A-Za-z;,/?:@&=+$\-_.!~*'()#%]/.test(path)) { - path = encodeURI(decodeURI(path)) - } + var _defer; - var queryIx = path.indexOf('?'), - query = null + if (hasSetImmediate) { + _defer = setImmediate; + } else if (hasNextTick) { + _defer = process.nextTick; + } else { + _defer = fallback; + } - if (queryIx >= 0) { - query = querystring.parse(path.slice(queryIx + 1)) - path = path.slice(0, queryIx) - } + var setImmediate$1 = wrap(_defer); - this.parsedPath = { - path: path, - query: query, - } -} + /** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. 
+ * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. + * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ + function asyncify(func) { + if (isAsync(func)) { + return function (...args/*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback) + } + } -RequestSigner.prototype.formatPath = function() { - var path = this.parsedPath.path, - query = this.parsedPath.query + return initialParams(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback) + } else { + callback(null, result); + } + }); + } - if (!query) return path + function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && err.message ? err : new Error(err)); + }); + } - // Services don't support empty query string keys - if (query[''] != null) delete query[''] + function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + setImmediate$1(e => { throw e }, err); + } + } - return path + '?' + encodeRfc3986(querystring.stringify(query)) -} + function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; + } -aws4.RequestSigner = RequestSigner + function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; + } -aws4.sign = function(request, credentials) { - return new RequestSigner(request, credentials).sign() -} + function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; + } + function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function') + return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; + } -/***/ }), + // conditionally promisify a function. + // only return a promise if a callback is omitted + function awaitify (asyncFn, arity = asyncFn.length) { + if (!arity) throw new Error('arity is undefined') + function awaitable (...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args) + } -/***/ 74225: -/***/ ((module) => { + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err) + resolve(cbArgs.length > 1 ? 
cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }) + } -module.exports = function(size) { - return new LruCache(size) -} + return awaitable + } -function LruCache(size) { - this.capacity = size | 0 - this.map = Object.create(null) - this.list = new DoublyLinkedList() -} + function applyEach (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = awaitify(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + wrapAsync(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; + } -LruCache.prototype.get = function(key) { - var node = this.map[key] - if (node == null) return undefined - this.used(node) - return node.val -} + function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = wrapAsync(iteratee); -LruCache.prototype.set = function(key, val) { - var node = this.map[key] - if (node != null) { - node.val = val - } else { - if (!this.capacity) this.prune() - if (!this.capacity) return false - node = new DoublyLinkedNode(key, val) - this.map[key] = node - this.capacity-- - } - this.used(node) - return true -} + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); + } -LruCache.prototype.used = function(node) { - this.list.moveToFront(node) -} + function isArrayLike(value) { + return value && + typeof value.length === 'number' && + value.length >= 0 && + value.length % 1 === 0; + } -LruCache.prototype.prune = function() { - var node = this.list.pop() - if (node != null) { - delete this.map[node.key] - this.capacity++ - } -} + // A temporary value used to identify if the loop should be broken. + // See #1064, #1293 + const breakLoop = {}; + function once(fn) { + function wrapper (...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper + } -function DoublyLinkedList() { - this.firstNode = null - this.lastNode = null -} + function getIterator (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); + } -DoublyLinkedList.prototype.moveToFront = function(node) { - if (this.firstNode == node) return + function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? {value: coll[i], key: i} : null; + } + } - this.remove(node) + function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) + return null; + i++; + return {value: item.value, key: i}; + } + } - if (this.firstNode == null) { - this.firstNode = node - this.lastNode = node - node.prev = null - node.next = null - } else { - node.prev = null - node.next = this.firstNode - node.next.prev = node - this.firstNode = node - } -} + function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + return i < len ? 
{value: obj[key], key} : null; + }; + } -DoublyLinkedList.prototype.pop = function() { - var lastNode = this.lastNode - if (lastNode != null) { - this.remove(lastNode) - } - return lastNode -} + function createIterator(coll) { + if (isArrayLike(coll)) { + return createArrayIterator(coll); + } -DoublyLinkedList.prototype.remove = function(node) { - if (this.firstNode == node) { - this.firstNode = node.next - } else if (node.prev != null) { - node.prev.next = node.next - } - if (this.lastNode == node) { - this.lastNode = node.prev - } else if (node.next != null) { - node.next.prev = node.prev - } -} + var iterator = getIterator(coll); + return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); + } + function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; + } -function DoublyLinkedNode(key, val) { - this.key = key - this.val = val - this.prev = null - this.next = null -} + // for async generators + function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({value, done: iterDone}) => { + //console.log('got value', value) + if (canceled || done) return + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } -/***/ }), + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return + if (err) return handleError(err) -/***/ 96545: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (err === false) { + done = true; + canceled = true; + return + } -module.exports = __nccwpck_require__(52618); + if (result === breakLoop || (done && running <= 0)) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } -/***/ }), + function handleError(err) { + if (canceled) return + awaiting = false; + done = true; + callback(err); + } -/***/ 68104: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + replenish(); + } -"use strict"; + var eachOfLimit = (limit) => { + return (obj, iteratee, callback) => { + callback = once(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1') + } + if (!obj) { + return callback(null); + } + if (isAsyncGenerator(obj)) { + return asyncEachOfLimit(obj, limit, iteratee, callback) + } + if (isAsyncIterable(obj)) { + return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) + } + var nextElem = createIterator(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + function iterateeCallback(err, value) { + if (canceled) return + running -= 1; + if (err) { + done = true; + callback(err); + } + else if (err === false) { + done = true; + canceled = true; + } + else if (value === breakLoop || (done && running <= 0)) { + done = true; + return callback(null); + } + else if (!looping) { + replenish(); + } + } -var utils = __nccwpck_require__(20328); -var settle = __nccwpck_require__(13211); -var buildFullPath = 
__nccwpck_require__(41934); -var buildURL = __nccwpck_require__(30646); -var http = __nccwpck_require__(98605); -var https = __nccwpck_require__(57211); -var httpFollow = __nccwpck_require__(67707).http; -var httpsFollow = __nccwpck_require__(67707).https; -var url = __nccwpck_require__(78835); -var zlib = __nccwpck_require__(78761); -var pkg = __nccwpck_require__(20696); -var createError = __nccwpck_require__(15226); -var enhanceError = __nccwpck_require__(21516); + function replenish () { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); + } + looping = false; + } -var isHttps = /https:?/; + replenish(); + }; + }; -/** - * - * @param {http.ClientRequestArgs} options - * @param {AxiosProxyConfig} proxy - * @param {string} location - */ -function setProxy(options, proxy, location) { - options.hostname = proxy.host; - options.host = proxy.host; - options.port = proxy.port; - options.path = location; + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, wrapAsync(iteratee), callback); + } - // Basic proxy authorization - if (proxy.auth) { - var base64 = Buffer.from(proxy.auth.username + ':' + proxy.auth.password, 'utf8').toString('base64'); - options.headers['Proxy-Authorization'] = 'Basic ' + base64; - } + var eachOfLimit$2 = awaitify(eachOfLimit$1, 4); - // If a proxy is used, any redirects must also pass through the proxy - options.beforeRedirect = function beforeRedirect(redirection) { - redirection.headers.host = redirection.host; - setProxy(redirection, proxy, redirection.href); - }; -} + // eachOf implementation optimized for array-likes + function eachOfArrayLike(coll, iteratee, callback) { + callback = once(callback); + var index = 0, + completed = 0, + {length} = coll, + canceled = false; + if (length === 0) { + callback(null); + } -/*eslint consistent-return:0*/ -module.exports = function httpAdapter(config) { - return new Promise(function dispatchHttpRequest(resolvePromise, rejectPromise) { - var resolve = function resolve(value) { - resolvePromise(value); - }; - var reject = function reject(value) { - rejectPromise(value); - }; - var data = config.data; - var headers = config.headers; + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return + if (err) { + callback(err); + } else if ((++completed === length) || value === breakLoop) { + callback(null); + } + } - // Set User-Agent (required by some servers) - // Only set header if it hasn't been set in config - // See https://github.com/axios/axios/issues/69 - if (!headers['User-Agent'] && !headers['user-agent']) { - headers['User-Agent'] = 'axios/' + pkg.version; + for (; index < length; index++) { + iteratee(coll[index], index, onlyOnce(iteratorCallback)); + } } - if (data && !utils.isStream(data)) { - if (Buffer.isBuffer(data)) { - // Nothing to do... - } else if (utils.isArrayBuffer(data)) { - data = Buffer.from(new Uint8Array(data)); - } else if (utils.isString(data)) { - data = Buffer.from(data, 'utf-8'); - } else { - return reject(createError( - 'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream', - config - )); - } - - // Add Content-Length header if data exists - headers['Content-Length'] = data.length; + // a generic version of eachOf which can handle array, object, and iterator cases. + function eachOfGeneric (coll, iteratee, callback) { + return eachOfLimit$2(coll, Infinity, iteratee, callback); } - // HTTP basic authentication - var auth = undefined; - if (config.auth) { - var username = config.auth.username || ''; - var password = config.auth.password || ''; - auth = username + ':' + password; + /** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. 
Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; + * var configs = {}; + * + * async.forEachOf(obj, function (value, key, callback) { + * fs.readFile(__dirname + value, "utf8", function (err, data) { + * if (err) return callback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * }, function (err) { + * if (err) console.error(err.message); + * // configs is now a map of JSON data + * doSomethingWith(configs); + * }); + */ + function eachOf(coll, iteratee, callback) { + var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, wrapAsync(iteratee), callback); } - // Parse url - var fullPath = buildFullPath(config.baseURL, config.url); - var parsed = url.parse(fullPath); - var protocol = parsed.protocol || 'http:'; + var eachOf$1 = awaitify(eachOf, 3); - if (!auth && parsed.auth) { - var urlAuth = parsed.auth.split(':'); - var urlUsername = urlAuth[0] || ''; - var urlPassword = urlAuth[1] || ''; - auth = urlUsername + ':' + urlPassword; - } - - if (auth) { - delete headers.Authorization; + /** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callback + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * async.map(['file1','file2','file3'], fs.stat, function(err, results) { + * // results is now an array of stats for each file + * }); + */ + function map (coll, iteratee, callback) { + return _asyncMap(eachOf$1, coll, iteratee, callback) } + var map$1 = awaitify(map, 3); - var isHttpsRequest = isHttps.test(protocol); - var agent = isHttpsRequest ? 
config.httpsAgent : config.httpAgent; - - var options = { - path: buildURL(parsed.path, config.params, config.paramsSerializer).replace(/^\?/, ''), - method: config.method.toUpperCase(), - headers: headers, - agent: agent, - agents: { http: config.httpAgent, https: config.httpsAgent }, - auth: auth - }; + /** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ + var applyEach$1 = applyEach(map$1); - if (config.socketPath) { - options.socketPath = config.socketPath; - } else { - options.hostname = parsed.hostname; - options.port = parsed.port; + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfSeries(coll, iteratee, callback) { + return eachOfLimit$2(coll, 1, iteratee, callback) } + var eachOfSeries$1 = awaitify(eachOfSeries, 3); - var proxy = config.proxy; - if (!proxy && proxy !== false) { - var proxyEnv = protocol.slice(0, -1) + '_proxy'; - var proxyUrl = process.env[proxyEnv] || process.env[proxyEnv.toUpperCase()]; - if (proxyUrl) { - var parsedProxyUrl = url.parse(proxyUrl); - var noProxyEnv = process.env.no_proxy || process.env.NO_PROXY; - var shouldProxy = true; - - if (noProxyEnv) { - var noProxy = noProxyEnv.split(',').map(function trim(s) { - return s.trim(); - }); - - shouldProxy = !noProxy.some(function proxyMatch(proxyElement) { - if (!proxyElement) { - return false; - } - if (proxyElement === '*') { - return true; - } - if (proxyElement[0] === '.' && - parsed.hostname.substr(parsed.hostname.length - proxyElement.length) === proxyElement) { - return true; - } + /** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function mapSeries (coll, iteratee, callback) { + return _asyncMap(eachOfSeries$1, coll, iteratee, callback) + } + var mapSeries$1 = awaitify(mapSeries, 3); - return parsed.hostname === proxyElement; - }); - } + /** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. + * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. It takes no args, other than + * a callback. + */ + var applyEachSeries = applyEach(mapSeries$1); - if (shouldProxy) { - proxy = { - host: parsedProxyUrl.hostname, - port: parsedProxyUrl.port, - protocol: parsedProxyUrl.protocol - }; + const PROMISE_SYMBOL = Symbol('promiseCallback'); - if (parsedProxyUrl.auth) { - var proxyUrlAuth = parsedProxyUrl.auth.split(':'); - proxy.auth = { - username: proxyUrlAuth[0], - password: proxyUrlAuth[1] - }; - } + function promiseCallback () { + let resolve, reject; + function callback (err, ...args) { + if (err) return reject(err) + resolve(args.length > 1 ? args : args[0]); } - } - } - - if (proxy) { - options.headers.host = parsed.hostname + (parsed.port ? 
':' + parsed.port : ''); - setProxy(options, proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path); - } - var transport; - var isHttpsProxy = isHttpsRequest && (proxy ? isHttps.test(proxy.protocol) : true); - if (config.transport) { - transport = config.transport; - } else if (config.maxRedirects === 0) { - transport = isHttpsProxy ? https : http; - } else { - if (config.maxRedirects) { - options.maxRedirects = config.maxRedirects; - } - transport = isHttpsProxy ? httpsFollow : httpFollow; - } + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, + reject = rej; + }); - if (config.maxBodyLength > -1) { - options.maxBodyLength = config.maxBodyLength; + return callback } - // Create the request - var req = transport.request(options, function handleResponse(res) { - if (req.aborted) return; - - // uncompress the response body transparently if required - var stream = res; - - // return the last request in case of redirects - var lastRequest = res.req || req; + /** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * @example + * + * async.auto({ + * // this function will just be passed a callback + * readData: async.apply(fs.readFile, 'data.txt', 'utf-8'), + * showData: ['readData', function(results, cb) { + * // results.readData is the file's contents + * // ... 
+ * }] + * }, callback); + * + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * console.log('in write_file', JSON.stringify(results)); + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * console.log('in email_link', JSON.stringify(results)); + * // once the file is written let's email a link to it... + * // results.write_file contains the filename returned by write_file. + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * console.log('err = ', err); + * console.log('results = ', results); + * }); + */ + function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. + callback = concurrency; + concurrency = null; + } + callback = once(callback || promiseCallback()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; - // if no content, is HEAD request or decompress disabled we should not decompress - if (res.statusCode !== 204 && lastRequest.method !== 'HEAD' && config.decompress !== false) { - switch (res.headers['content-encoding']) { - /*eslint default-case:0*/ - case 'gzip': - case 'compress': - case 'deflate': - // add the unzipper to the body stream processing pipeline - stream = stream.pipe(zlib.createUnzip()); + var listeners = Object.create(null); - // remove the content-encoding in order to not confuse downstream operations - delete res.headers['content-encoding']; - break; - } - } + var readyTasks = []; - var response = { - status: res.statusCode, - statusText: res.statusMessage, - headers: res.headers, - config: config, - request: lastRequest - }; + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; - if (config.responseType === 'stream') { - response.data = stream; - settle(resolve, reject, response); - } else { - var responseBuffer = []; - stream.on('data', function handleStreamData(chunk) { - responseBuffer.push(chunk); + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } - // make sure the content length is not over the maxContentLength if specified - if (config.maxContentLength > -1 && Buffer.concat(responseBuffer).length > config.maxContentLength) { - stream.destroy(); - reject(createError('maxContentLength size of ' + config.maxContentLength + ' exceeded', - config, null, lastRequest)); - } - }); + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + 
uncheckedDependencies[key] = remainingDependencies; - stream.on('error', function handleStreamError(err) { - if (req.aborted) return; - reject(enhanceError(err, config, null, lastRequest)); + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + + '` has a non-existent dependency `' + + dependencyName + '` in ' + + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); }); - stream.on('end', function handleStreamEnd() { - var responseData = Buffer.concat(responseBuffer); - if (config.responseType !== 'arraybuffer') { - responseData = responseData.toString(config.responseEncoding); - if (!config.responseEncoding || config.responseEncoding === 'utf8') { - responseData = utils.stripBOM(responseData); - } - } + checkForDeadlocks(); + processQueue(); - response.data = responseData; - settle(resolve, reject, response); - }); - } - }); + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } - // Handle errors - req.on('error', function handleRequestError(err) { - if (req.aborted && err.code !== 'ERR_FR_TOO_MANY_REDIRECTS') return; - reject(enhanceError(err, config, null, req)); - }); + function processQueue() { + if (canceled) return + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while(readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } - // Handle request timeout - if (config.timeout) { - // Sometime, the response will be very slow, and does not respond, the connect event will be block by event loop system. - // And timer callback will be fired, and abort() will be invoked before connection, then get "socket hang up" and code ECONNRESET. - // At this time, if we have a large number of request, nodejs will hang up some socket on background. and the number will up and up. - // And then these socket which be hang up will devoring CPU little by little. - // ClientRequest.setTimeout will be fired on the specify milliseconds, and can make sure that abort() will be fired after connect. 
- req.setTimeout(config.timeout, function handleRequestTimeout() { - req.abort(); - reject(createError('timeout of ' + config.timeout + 'ms exceeded', config, 'ECONNABORTED', req)); - }); - } + } - if (config.cancelToken) { - // Handle cancellation - config.cancelToken.promise.then(function onCanceled(cancel) { - if (req.aborted) return; + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } - req.abort(); - reject(cancel); - }); - } + taskListeners.push(fn); + } - // Send the request - if (utils.isStream(data)) { - data.on('error', function handleStreamError(err) { - reject(enhanceError(err, config, null, req)); - }).pipe(req); - } else { - req.end(data); - } - }); -}; + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } -/***/ }), + function runTask(key, task) { + if (hasError) return; -/***/ 3454: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var taskCallback = onlyOnce((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); -"use strict"; + runningTasks++; + var taskFn = wrapAsync(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } -var utils = __nccwpck_require__(20328); -var settle = __nccwpck_require__(13211); -var cookies = __nccwpck_require__(21545); -var buildURL = __nccwpck_require__(30646); -var buildFullPath = __nccwpck_require__(41934); -var parseHeaders = __nccwpck_require__(86455); -var isURLSameOrigin = __nccwpck_require__(33608); -var createError = __nccwpck_require__(15226); + if (counter !== numTasks) { + throw new Error( + 'async.auto cannot execute tasks due to a recursive dependency' + ); + } + } -module.exports = function xhrAdapter(config) { - return new Promise(function dispatchXhrRequest(resolve, reject) { - var requestData = config.data; - var requestHeaders = config.headers; + function getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } - if (utils.isFormData(requestData)) { - delete requestHeaders['Content-Type']; // Let the browser set it + return callback[PROMISE_SYMBOL] } - var request = new XMLHttpRequest(); + var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; + var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; + var FN_ARG_SPLIT = /,/; + var FN_ARG = /(=.+)?(\s*)$/; + var STRIP_COMMENTS = 
/((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg; - // HTTP basic authentication - if (config.auth) { - var username = config.auth.username || ''; - var password = config.auth.password ? unescape(encodeURIComponent(config.auth.password)) : ''; - requestHeaders.Authorization = 'Basic ' + btoa(username + ':' + password); + function parseParams(func) { + const src = func.toString().replace(STRIP_COMMENTS, ''); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) + let [, args] = match; + return args + .replace(/\s/g, '') + .split(FN_ARG_SPLIT) + .map((arg) => arg.replace(FN_ARG, '').trim()); } - var fullPath = buildFullPath(config.baseURL, config.url); - request.open(config.method.toUpperCase(), buildURL(fullPath, config.params, config.paramsSerializer), true); + /** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. 
+ * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ + function autoInject(tasks, callback) { + var newTasks = {}; - // Set the request timeout in MS - request.timeout = config.timeout; + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = isAsync(taskFn); + var hasNoDeps = + (!fnIsAsync && taskFn.length === 1) || + (fnIsAsync && taskFn.length === 0); - // Listen for ready state - request.onreadystatechange = function handleLoad() { - if (!request || request.readyState !== 4) { - return; - } + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); - // The request errored out and we didn't get a response, this will be - // handled by onerror instead - // With one exception: request that using file: protocol, most browsers - // will return status as 0 even though it's a successful request - if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) { - return; - } + newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } - // Prepare the response - var responseHeaders = 'getAllResponseHeaders' in request ? parseHeaders(request.getAllResponseHeaders()) : null; - var responseData = !config.responseType || config.responseType === 'text' ? 
request.responseText : request.response; - var response = { - data: responseData, - status: request.status, - statusText: request.statusText, - headers: responseHeaders, - config: config, - request: request - }; + // remove callback param + if (!fnIsAsync) params.pop(); - settle(resolve, reject, response); + newTasks[key] = params.concat(newTask); + } - // Clean up request - request = null; - }; + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + wrapAsync(taskFn)(...newArgs); + } + }); - // Handle browser request cancellation (as opposed to a manual cancellation) - request.onabort = function handleAbort() { - if (!request) { - return; - } + return auto(newTasks, callback); + } - reject(createError('Request aborted', config, 'ECONNABORTED', request)); + // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation + // used for queues. This implementation assumes that the node provided by the user can be modified + // to adjust the next and last properties. We implement only the minimal functionality + // for queue support. + class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } - // Clean up request - request = null; - }; + removeLink(node) { + if (node.prev) node.prev.next = node.next; + else this.head = node.next; + if (node.next) node.next.prev = node.prev; + else this.tail = node.prev; - // Handle low level network errors - request.onerror = function handleError() { - // Real errors are hidden from us by the browser - // onerror should only fire if it's a network error - reject(createError('Network Error', config, null, request)); + node.prev = node.next = null; + this.length -= 1; + return node; + } - // Clean up request - request = null; - }; + empty () { + while(this.head) this.shift(); + return this; + } - // Handle timeout - request.ontimeout = function handleTimeout() { - var timeoutErrorMessage = 'timeout of ' + config.timeout + 'ms exceeded'; - if (config.timeoutErrorMessage) { - timeoutErrorMessage = config.timeoutErrorMessage; - } - reject(createError(timeoutErrorMessage, config, 'ECONNABORTED', - request)); + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode; + else this.tail = newNode; + node.next = newNode; + this.length += 1; + } - // Clean up request - request = null; - }; + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode; + else this.head = newNode; + node.prev = newNode; + this.length += 1; + } - // Add xsrf header - // This is only done if running in a standard browser environment. - // Specifically not if we're in a web worker, or react-native. - if (utils.isStandardBrowserEnv()) { - // Add xsrf header - var xsrfValue = (config.withCredentials || isURLSameOrigin(fullPath)) && config.xsrfCookieName ? 
- cookies.read(config.xsrfCookieName) : - undefined; + unshift(node) { + if (this.head) this.insertBefore(this.head, node); + else setInitial(this, node); + } - if (xsrfValue) { - requestHeaders[config.xsrfHeaderName] = xsrfValue; - } - } + push(node) { + if (this.tail) this.insertAfter(this.tail, node); + else setInitial(this, node); + } - // Add headers to the request - if ('setRequestHeader' in request) { - utils.forEach(requestHeaders, function setRequestHeader(val, key) { - if (typeof requestData === 'undefined' && key.toLowerCase() === 'content-type') { - // Remove Content-Type if data is undefined - delete requestHeaders[key]; - } else { - // Otherwise add header to the request - request.setRequestHeader(key, val); + shift() { + return this.head && this.removeLink(this.head); } - }); - } - // Add withCredentials to request if needed - if (!utils.isUndefined(config.withCredentials)) { - request.withCredentials = !!config.withCredentials; - } + pop() { + return this.tail && this.removeLink(this.tail); + } - // Add responseType to request if needed - if (config.responseType) { - try { - request.responseType = config.responseType; - } catch (e) { - // Expected DOMException thrown by browsers not compatible XMLHttpRequest Level 2. - // But, this can be suppressed for 'json' type as it can be parsed by default 'transformResponse' function. - if (config.responseType !== 'json') { - throw e; + toArray() { + return [...this] } - } - } - // Handle progress if needed - if (typeof config.onDownloadProgress === 'function') { - request.addEventListener('progress', config.onDownloadProgress); + *[Symbol.iterator] () { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove (testFn) { + var curr = this.head; + while(curr) { + var {next} = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } } - // Not all browsers support upload events - if (typeof config.onUploadProgress === 'function' && request.upload) { - request.upload.addEventListener('progress', config.onUploadProgress); + function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; } - if (config.cancelToken) { - // Handle cancellation - config.cancelToken.promise.then(function onCanceled(cancel) { - if (!request) { - return; + function queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); } - request.abort(); - reject(cancel); - // Clean up request - request = null; - }); - } + var _worker = wrapAsync(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; - if (!requestData) { - requestData = null; - } + function on (event, handler) { + events[event].push(handler); + } - // Send the request - request.send(requestData); - }); -}; + function once (event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + function off (event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []) + if (!handler) return events[event] = [] + events[event] = events[event].filter(ev => ev !== handler); + } -/***/ }), + function trigger (event, ...args) { + events[event].forEach(handler => handler(...args)); + } -/***/ 52618: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var processingScheduled = 
false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; -"use strict"; + var res, rej; + function promiseCallback (err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res() + if (args.length <= 1) return res(args[0]) + res(args); + } + var item = { + data, + callback: rejectOnError ? + promiseCallback : + (callback || promiseCallback) + }; -var utils = __nccwpck_require__(20328); -var bind = __nccwpck_require__(77065); -var Axios = __nccwpck_require__(98178); -var mergeConfig = __nccwpck_require__(74831); -var defaults = __nccwpck_require__(98190); + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } -/** - * Create an instance of Axios - * - * @param {Object} defaultConfig The default config for the instance - * @return {Axios} A new instance of Axios - */ -function createInstance(defaultConfig) { - var context = new Axios(defaultConfig); - var instance = bind(Axios.prototype.request, context); + if (!processingScheduled) { + processingScheduled = true; + setImmediate$1(() => { + processingScheduled = false; + q.process(); + }); + } - // Copy axios.prototype to instance - utils.extend(instance, Axios.prototype, context); + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }) + } + } - // Copy context to instance - utils.extend(instance, context); + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; - return instance; -} + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; -// Create the default instance to be exported -var axios = createInstance(defaults); + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } -// Expose Axios class to allow class inheritance -axios.Axios = Axios; - -// Factory for creating new instances -axios.create = function create(instanceConfig) { - return createInstance(mergeConfig(axios.defaults, instanceConfig)); -}; - -// Expose Cancel & CancelToken -axios.Cancel = __nccwpck_require__(98875); -axios.CancelToken = __nccwpck_require__(71587); -axios.isCancel = __nccwpck_require__(64057); - -// Expose all/spread -axios.all = function all(promises) { - return Promise.all(promises); -}; -axios.spread = __nccwpck_require__(74850); - -// Expose isAxiosError -axios.isAxiosError = __nccwpck_require__(60650); - -module.exports = axios; - -// Allow use of default import syntax in TypeScript -module.exports.default = axios; + task.callback(err, ...args); + if (err != null) { + trigger('error', err, task.data); + } + } -/***/ }), + if (numRunning <= (q.concurrency - q.buffer) ) { + trigger('unsaturated'); + } -/***/ 98875: -/***/ ((module) => { + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } -"use strict"; + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + setImmediate$1(() => trigger('drain')); + return true + } + return false + } + const eventMethod = (name) => (handler) => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err) + resolve(data); + }); + }) + } + off(name); + on(name, handler); -/** - * A `Cancel` is an object that is 
thrown when an operation is canceled. - * - * @class - * @param {string=} message The message. - */ -function Cancel(message) { - this.message = message; -} + }; -Cancel.prototype.toString = function toString() { - return 'Cancel' + (this.message ? ': ' + this.message : ''); -}; + var isProcessing = false; + var q = { + _tasks: new DLL(), + *[Symbol.iterator] () { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, false, callback)) + } + return _insert(data, false, false, callback); + }, + pushAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, true, callback)) + } + return _insert(data, false, true, callback); + }, + kill () { + off(); + q._tasks.empty(); + }, + unshift (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, false, callback)) + } + return _insert(data, true, false, callback); + }, + unshiftAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, true, callback)) + } + return _insert(data, true, true, callback); + }, + remove (testFn) { + q._tasks.remove(testFn); + }, + process () { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). + if (isProcessing) { + return; + } + isProcessing = true; + while(!q.paused && numRunning < q.concurrency && q._tasks.length){ + var tasks = [], data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } -Cancel.prototype.__CANCEL__ = true; + numRunning += 1; -module.exports = Cancel; + if (q._tasks.length === 0) { + trigger('empty'); + } + if (numRunning === q.concurrency) { + trigger('saturated'); + } -/***/ }), + var cb = onlyOnce(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length () { + return q._tasks.length; + }, + running () { + return numRunning; + }, + workersList () { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause () { + q.paused = true; + }, + resume () { + if (q.paused === false) { return; } + q.paused = false; + setImmediate$1(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + }, + }); + return q; + } -/***/ 71587: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. 
Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. + * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); + } + var reduce$1 = awaitify(reduce, 4); -var Cancel = __nccwpck_require__(98875); + /** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ + function seq(...functions) { + var _functions = functions.map(wrapAsync); + return function (...args) { + var that = this; -/** - * A `CancelToken` is an object that can be used to request cancellation of an operation. - * - * @class - * @param {Function} executor The executor function. 
- */ -function CancelToken(executor) { - if (typeof executor !== 'function') { - throw new TypeError('executor must be a function.'); - } + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = promiseCallback(); + } - var resolvePromise; - this.promise = new Promise(function promiseExecutor(resolve) { - resolvePromise = resolve; - }); + reduce$1(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, + (err, results) => cb(err, ...results)); - var token = this; - executor(function cancel(message) { - if (token.reason) { - // Cancellation has already been requested - return; + return cb[PROMISE_SYMBOL] + }; } - token.reason = new Cancel(message); - resolvePromise(token.reason); - }); -} + /** + * Creates a function which is a composition of the passed asynchronous + * functions. Each function consumes the return value of the function that + * follows. Composing functions `f()`, `g()`, and `h()` would produce the result + * of `f(g(h()))`, only this version uses callbacks to obtain the return values. + * + * If the last argument to the composed function is not a function, a promise + * is returned when you call it. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name compose + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} an asynchronous function that is the composed + * asynchronous `functions` + * @example + * + * function add1(n, callback) { + * setTimeout(function () { + * callback(null, n + 1); + * }, 10); + * } + * + * function mul3(n, callback) { + * setTimeout(function () { + * callback(null, n * 3); + * }, 10); + * } + * + * var add1mul3 = async.compose(mul3, add1); + * add1mul3(4, function (err, result) { + * // result now equals 15 + * }); + */ + function compose(...args) { + return seq(...args.reverse()); + } -/** - * Throws a `Cancel` if cancellation has been requested. - */ -CancelToken.prototype.throwIfRequested = function throwIfRequested() { - if (this.reason) { - throw this.reason; - } -}; + /** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function mapLimit (coll, limit, iteratee, callback) { + return _asyncMap(eachOfLimit(limit), coll, iteratee, callback) + } + var mapLimit$1 = awaitify(mapLimit, 4); -/** - * Returns an object that contains a new `CancelToken` and a function that, when called, - * cancels the `CancelToken`. 
- */ -CancelToken.source = function source() { - var cancel; - var token = new CancelToken(function executor(c) { - cancel = c; - }); - return { - token: token, - cancel: cancel - }; -}; + /** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ + function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } -module.exports = CancelToken; + return callback(err, result); + }); + } + var concatLimit$1 = awaitify(concatLimit, 4); + /** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + * @example + * + * async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files) { + * // files is now a list of filenames that exist in the 3 directories + * }); + */ + function concat(coll, iteratee, callback) { + return concatLimit$1(coll, Infinity, iteratee, callback) + } + var concat$1 = awaitify(concat, 3); -/***/ }), + /** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. 
+ * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ + function concatSeries(coll, iteratee, callback) { + return concatLimit$1(coll, 1, iteratee, callback) + } + var concatSeries$1 = awaitify(concatSeries, 3); -/***/ 64057: -/***/ ((module) => { + /** + * Returns a function that when called, calls-back with the values provided. + * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. + * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... + * }, callback); + */ + function constant(...args) { + return function (...ignoredArgs/*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; + } -"use strict"; + function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = wrapAsync(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, breakLoop); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; + } -module.exports = function isCancel(value) { - return !!(value && value.__CANCEL__); -}; + /** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. 
+ * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns A Promise, if no callback is passed + * @example + * + * async.detect(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, result) { + * // result now equals the first file in the list that exists + * }); + */ + function detect(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) + } + var detect$1 = awaitify(detect, 3); -/***/ }), + /** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns a Promise if no callback is passed + */ + function detectLimit(coll, limit, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(limit), coll, iteratee, callback) + } + var detectLimit$1 = awaitify(detectLimit, 4); -/***/ 98178: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns a Promise if no callback is passed + */ + function detectSeries(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit(1), coll, iteratee, callback) + } -"use strict"; + var detectSeries$1 = awaitify(detectSeries, 3); + function consoleFunc(name) { + return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { + if (typeof console === 'object') { + if (err) { + if (console.error) { + console.error(err); + } + } else if (console[name]) { + resultArgs.forEach(x => console[name](x)); + } + } + }) + } -var utils = __nccwpck_require__(20328); -var buildURL = __nccwpck_require__(30646); -var InterceptorManager = __nccwpck_require__(3214); -var dispatchRequest = __nccwpck_require__(85062); -var mergeConfig = __nccwpck_require__(74831); + /** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. + * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ + var dir = consoleFunc('dir'); -/** - * Create a new instance of Axios - * - * @param {Object} instanceConfig The default config for the instance - */ -function Axios(instanceConfig) { - this.defaults = instanceConfig; - this.interceptors = { - request: new InterceptorManager(), - response: new InterceptorManager() - }; -} + /** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doWhilst(iteratee, test, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results; -/** - * Dispatch a request - * - * @param {Object} config The config specific for this request (merged with this.defaults) - */ -Axios.prototype.request = function request(config) { - /*eslint no-param-reassign:0*/ - // Allow for axios('example/url'[, config]) a la fetch API - if (typeof config === 'string') { - config = arguments[1] || {}; - config.url = arguments[0]; - } else { - config = config || {}; - } + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } - config = mergeConfig(this.defaults, config); + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } - // Set config.method - if (config.method) { - config.method = config.method.toLowerCase(); - } else if (this.defaults.method) { - config.method = this.defaults.method.toLowerCase(); - } else { - config.method = 'get'; - } + return check(null, true); + } - // Hook up interceptors middleware - var chain = [dispatchRequest, undefined]; - var promise = Promise.resolve(config); + var doWhilst$1 = awaitify(doWhilst, 3); - this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) { - chain.unshift(interceptor.fulfilled, interceptor.rejected); - }); + /** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doUntil(iteratee, test, callback) { + const _test = wrapAsync(test); + return doWhilst$1(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb (err, !truth)); + }, callback); + } - this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) { - chain.push(interceptor.fulfilled, interceptor.rejected); - }); + function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); + } - while (chain.length) { - promise = promise.then(chain.shift(), chain.shift()); - } + /** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. 
+ * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // assuming openFiles is an array of file names and saveFile is a function + * // to save the modified contents of that file: + * + * async.each(openFiles, saveFile, function(err){ + * // if any of the saves produced an error, err would equal that error + * }); + * + * // assuming openFiles is an array of file names + * async.each(openFiles, function(file, callback) { + * + * // Perform operation on file here. + * console.log('Processing file ' + file); + * + * if( file.length > 32 ) { + * console.log('This file name is too long'); + * callback('File name too long'); + * } else { + * // Do work to process file here + * console.log('File processed'); + * callback(); + * } + * }, function(err) { + * // if any of the file processing produced an error, err would equal that error + * if( err ) { + * // One of the iterations produced an error. + * // All processing will now stop. + * console.log('A file failed to process'); + * } else { + * console.log('All files have been processed successfully'); + * } + * }); + */ + function eachLimit(coll, iteratee, callback) { + return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } - return promise; -}; + var each = awaitify(eachLimit, 3); -Axios.prototype.getUri = function getUri(config) { - config = mergeConfig(this.defaults, config); - return buildURL(config.url, config.params, config.paramsSerializer).replace(/^\?/, ''); -}; - -// Provide aliases for supported request methods -utils.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) { - /*eslint func-names:0*/ - Axios.prototype[method] = function(url, config) { - return this.request(mergeConfig(config || {}, { - method: method, - url: url, - data: (config || {}).data - })); - }; -}); - -utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { - /*eslint func-names:0*/ - Axios.prototype[method] = function(url, data, config) { - return this.request(mergeConfig(config || {}, { - method: method, - url: url, - data: data - })); - }; -}); - -module.exports = Axios; - - -/***/ }), - -/***/ 3214: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -function InterceptorManager() { - this.handlers = []; -} - -/** - * Add a new interceptor to the stack - * - * @param {Function} fulfilled The function to handle `then` for a `Promise` - * @param {Function} rejected The function to handle `reject` for a `Promise` - * - * @return {Number} An ID used to remove interceptor later - */ -InterceptorManager.prototype.use = function use(fulfilled, rejected) { - this.handlers.push({ - fulfilled: fulfilled, - rejected: rejected - }); - return this.handlers.length - 1; -}; - 
-/** - * Remove an interceptor from the stack - * - * @param {Number} id The ID that was returned by `use` - */ -InterceptorManager.prototype.eject = function eject(id) { - if (this.handlers[id]) { - this.handlers[id] = null; - } -}; - -/** - * Iterate over all the registered interceptors - * - * This method is particularly useful for skipping over any - * interceptors that may have become `null` calling `eject`. - * - * @param {Function} fn The function to call for each interceptor - */ -InterceptorManager.prototype.forEach = function forEach(fn) { - utils.forEach(this.handlers, function forEachHandler(h) { - if (h !== null) { - fn(h); + /** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachLimit$1(coll, limit, iteratee, callback) { + return eachOfLimit(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); } - }); -}; - -module.exports = InterceptorManager; - - -/***/ }), - -/***/ 41934: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var isAbsoluteURL = __nccwpck_require__(41301); -var combineURLs = __nccwpck_require__(57189); - -/** - * Creates a new URL by combining the baseURL with the requestedURL, - * only when the requestedURL is not already an absolute URL. - * If the requestURL is absolute, this function returns the requestedURL untouched. - * - * @param {string} baseURL The base URL - * @param {string} requestedURL Absolute or relative URL to combine - * @returns {string} The combined full path - */ -module.exports = function buildFullPath(baseURL, requestedURL) { - if (baseURL && !isAbsoluteURL(requestedURL)) { - return combineURLs(baseURL, requestedURL); - } - return requestedURL; -}; - - -/***/ }), - -/***/ 15226: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var enhanceError = __nccwpck_require__(21516); - -/** - * Create an Error with the specified message, config, error code, request and response. - * - * @param {string} message The error message. - * @param {Object} config The config. - * @param {string} [code] The error code (for example, 'ECONNABORTED'). - * @param {Object} [request] The request. - * @param {Object} [response] The response. - * @returns {Error} The created error. 
- */ -module.exports = function createError(message, config, code, request, response) { - var error = new Error(message); - return enhanceError(error, config, code, request, response); -}; - - -/***/ }), - -/***/ 85062: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); -var transformData = __nccwpck_require__(19812); -var isCancel = __nccwpck_require__(64057); -var defaults = __nccwpck_require__(98190); - -/** - * Throws a `Cancel` if cancellation has been requested. - */ -function throwIfCancellationRequested(config) { - if (config.cancelToken) { - config.cancelToken.throwIfRequested(); - } -} - -/** - * Dispatch a request to the server using the configured adapter. - * - * @param {object} config The config that is to be used for the request - * @returns {Promise} The Promise to be fulfilled - */ -module.exports = function dispatchRequest(config) { - throwIfCancellationRequested(config); - - // Ensure headers exist - config.headers = config.headers || {}; + var eachLimit$2 = awaitify(eachLimit$1, 4); - // Transform request data - config.data = transformData( - config.data, - config.headers, - config.transformRequest - ); + /** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. - // Flatten headers - config.headers = utils.merge( - config.headers.common || {}, - config.headers[config.method] || {}, - config.headers - ); + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachSeries(coll, iteratee, callback) { + return eachLimit$2(coll, 1, iteratee, callback) + } + var eachSeries$1 = awaitify(eachSeries, 3); - utils.forEach( - ['delete', 'get', 'head', 'post', 'put', 'patch', 'common'], - function cleanHeaderConfig(method) { - delete config.headers[method]; + /** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. 
+ * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ + function ensureAsync(fn) { + if (isAsync(fn)) return fn; + return function (...args/*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + setImmediate$1(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; } - ); - var adapter = config.adapter || defaults.adapter; + /** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * async.every(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, result) { + * // if result is true then every file exists + * }); + */ + function every(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) + } + var every$1 = awaitify(every, 3); - return adapter(config).then(function onAdapterResolution(response) { - throwIfCancellationRequested(config); + /** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + */ + function everyLimit(coll, limit, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfLimit(limit), coll, iteratee, callback) + } + var everyLimit$1 = awaitify(everyLimit, 4); - // Transform response data - response.data = transformData( - response.data, - response.headers, - config.transformResponse - ); + /** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function everySeries(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) + } + var everySeries$1 = awaitify(everySeries, 3); - return response; - }, function onAdapterRejection(reason) { - if (!isCancel(reason)) { - throwIfCancellationRequested(config); + function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); + } - // Transform response data - if (reason && reason.response) { - reason.response.data = transformData( - reason.response.data, - reason.response.headers, - config.transformResponse - ); - } + function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({index, value: x}); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results + .sort((a, b) => a.index - b.index) + .map(v => v.value)); + }); } - return Promise.reject(reason); - }); -}; + function _filter(eachfn, coll, iteratee, callback) { + var filter = isArrayLike(coll) ? filterArray : filterGeneric; + return filter(eachfn, coll, wrapAsync(iteratee), callback); + } + /** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + * @example + * + * async.filter(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, results) { + * // results now equals an array of the existing files + * }); + */ + function filter (coll, iteratee, callback) { + return _filter(eachOf$1, coll, iteratee, callback) + } + var filter$1 = awaitify(filter, 3); -/***/ }), + /** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ + function filterLimit (coll, limit, iteratee, callback) { + return _filter(eachOfLimit(limit), coll, iteratee, callback) + } + var filterLimit$1 = awaitify(filterLimit, 4); -/***/ 21516: -/***/ ((module) => { + /** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ + function filterSeries (coll, iteratee, callback) { + return _filter(eachOfSeries$1, coll, iteratee, callback) + } + var filterSeries$1 = awaitify(filterSeries, 3); -"use strict"; + /** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). + * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). 
+ * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ + function forever(fn, errback) { + var done = onlyOnce(errback); + var task = wrapAsync(ensureAsync(fn)); -/** - * Update an Error with the specified config, error code, and response. - * - * @param {Error} error The error to update. - * @param {Object} config The config. - * @param {string} [code] The error code (for example, 'ECONNABORTED'). - * @param {Object} [request] The request. - * @param {Object} [response] The response. - * @returns {Error} The error. - */ -module.exports = function enhanceError(error, config, code, request, response) { - error.config = config; - if (code) { - error.code = code; - } + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); + } + var forever$1 = awaitify(forever, 2); - error.request = request; - error.response = response; - error.isAxiosError = true; + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. 
+ * @returns {Promise} a promise, if no callback is passed + */ + function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, {key, val}); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var {hasOwnProperty} = Object.prototype; - error.toJSON = function toJSON() { - return { - // Standard - message: this.message, - name: this.name, - // Microsoft - description: this.description, - number: this.number, - // Mozilla - fileName: this.fileName, - lineNumber: this.lineNumber, - columnNumber: this.columnNumber, - stack: this.stack, - // Axios - config: this.config, - code: this.code - }; - }; - return error; -}; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var {key} = mapResults[i]; + var {val} = mapResults[i]; + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } -/***/ }), + return callback(err, result); + }); + } -/***/ 74831: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var groupByLimit$1 = awaitify(groupByLimit, 4); -"use strict"; + /** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * async.groupBy(['userId1', 'userId2', 'userId3'], function(userId, callback) { + * db.findById(userId, function(err, user) { + * if (err) return callback(err); + * return callback(null, user.age); + * }); + * }, function(err, result) { + * // result is object containing the userIds grouped by age + * // e.g. { 30: ['userId1', 'userId3'], 42: ['userId2']}; + * }); + */ + function groupBy (coll, iteratee, callback) { + return groupByLimit$1(coll, Infinity, iteratee, callback) + } + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupBySeries (coll, iteratee, callback) { + return groupByLimit$1(coll, 1, iteratee, callback) + } -var utils = __nccwpck_require__(20328); + /** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. + * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ + var log = consoleFunc('log'); -/** - * Config-specific merge-function which creates a new config-object - * by merging two configuration objects together. - * - * @param {Object} config1 - * @param {Object} config2 - * @returns {Object} New object resulting from merging config2 to config1 - */ -module.exports = function mergeConfig(config1, config2) { - // eslint-disable-next-line no-param-reassign - config2 = config2 || {}; - var config = {}; + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesLimit(obj, limit, iteratee, callback) { + callback = once(callback); + var newObj = {}; + var _iteratee = wrapAsync(iteratee); + return eachOfLimit(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); + } - var valueFromConfig2Keys = ['url', 'method', 'data']; - var mergeDeepPropertiesKeys = ['headers', 'auth', 'proxy', 'params']; - var defaultToConfig2Keys = [ - 'baseURL', 'transformRequest', 'transformResponse', 'paramsSerializer', - 'timeout', 'timeoutMessage', 'withCredentials', 'adapter', 'responseType', 'xsrfCookieName', - 'xsrfHeaderName', 'onUploadProgress', 'onDownloadProgress', 'decompress', - 'maxContentLength', 'maxBodyLength', 'maxRedirects', 'transport', 'httpAgent', - 'httpsAgent', 'cancelToken', 'socketPath', 'responseEncoding' - ]; - var directMergeKeys = ['validateStatus']; + var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); - function getMergedValue(target, source) { - if (utils.isPlainObject(target) && utils.isPlainObject(source)) { - return utils.merge(target, source); - } else if (utils.isPlainObject(source)) { - return utils.merge({}, source); - } else if (utils.isArray(source)) { - return source.slice(); + /** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * async.mapValues({ + * f1: 'file1', + * f2: 'file2', + * f3: 'file3' + * }, function (file, key, callback) { + * fs.stat(file, callback); + * }, function(err, result) { + * // result is now a map of stats for each file, e.g. 
+ * // { + * // f1: [stats for file1], + * // f2: [stats for file2], + * // f3: [stats for file3] + * // } + * }); + */ + function mapValues(obj, iteratee, callback) { + return mapValuesLimit$1(obj, Infinity, iteratee, callback) } - return source; - } - function mergeDeepProperties(prop) { - if (!utils.isUndefined(config2[prop])) { - config[prop] = getMergedValue(config1[prop], config2[prop]); - } else if (!utils.isUndefined(config1[prop])) { - config[prop] = getMergedValue(undefined, config1[prop]); + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesSeries(obj, iteratee, callback) { + return mapValuesLimit$1(obj, 1, iteratee, callback) } - } - utils.forEach(valueFromConfig2Keys, function valueFromConfig2(prop) { - if (!utils.isUndefined(config2[prop])) { - config[prop] = getMergedValue(undefined, config2[prop]); + /** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. 
+ * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ + function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = wrapAsync(fn); + var memoized = initialParams((args, callback) => { + var key = hasher(...args); + if (key in memo) { + setImmediate$1(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; } - }); - utils.forEach(mergeDeepPropertiesKeys, mergeDeepProperties); + /** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ + var _defer$1; - utils.forEach(defaultToConfig2Keys, function defaultToConfig2(prop) { - if (!utils.isUndefined(config2[prop])) { - config[prop] = getMergedValue(undefined, config2[prop]); - } else if (!utils.isUndefined(config1[prop])) { - config[prop] = getMergedValue(undefined, config1[prop]); + if (hasNextTick) { + _defer$1 = process.nextTick; + } else if (hasSetImmediate) { + _defer$1 = setImmediate; + } else { + _defer$1 = fallback; } - }); - utils.forEach(directMergeKeys, function merge(prop) { - if (prop in config2) { - config[prop] = getMergedValue(config1[prop], config2[prop]); - } else if (prop in config1) { - config[prop] = getMergedValue(undefined, config1[prop]); - } - }); + var nextTick = wrap(_defer$1); - var axiosKeys = valueFromConfig2Keys - .concat(mergeDeepPropertiesKeys) - .concat(defaultToConfig2Keys) - .concat(directMergeKeys); + var parallel = awaitify((eachfn, tasks, callback) => { + var results = isArrayLike(tasks) ? 
[] : {}; - var otherKeys = Object - .keys(config1) - .concat(Object.keys(config2)) - .filter(function filterAxiosKeys(key) { - return axiosKeys.indexOf(key) === -1; - }); - - utils.forEach(otherKeys, mergeDeepProperties); - - return config; -}; + eachfn(tasks, (task, key, taskCb) => { + wrapAsync(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); + }, 3); + /** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // optional callback + * function(err, results) { + * // the results array will equal ['one','two'] even though + * // the second function had a shorter timeout. + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * // results is now equals to: {one: 1, two: 2} + * }); + */ + function parallel$1(tasks, callback) { + return parallel(eachOf$1, tasks, callback); + } -/***/ }), + /** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. 
+ * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + */ + function parallelLimit(tasks, limit, callback) { + return parallel(eachOfLimit(limit), tasks, callback); + } -/***/ 13211: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * A queue of tasks for the worker function to complete. + * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. + * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object + * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`, + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. + * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. + * @property {AsyncFunction} unshirtAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. 
+ * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. + * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. + * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error. + * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. + * + * @example + * const q = aync.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } + * + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() + */ -"use strict"; + /** + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. + * + * @name queue + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. 
+ * @example + * + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); + * + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() + * + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); + * + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * // callback is optional + * q.push({name: 'bar'}); + * + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); + * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) { + * console.log('finished processing bar'); + * }); + */ + function queue$1 (worker, concurrency) { + var _worker = wrapAsync(worker); + return queue((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); + } + // Binary min-heap implementation used for priority queue. + // Implementation is stable, i.e. push time is considered for equal priorities + class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } -var createError = __nccwpck_require__(15226); + get length() { + return this.heap.length; + } -/** - * Resolve or reject a Promise based on response status. - * - * @param {Function} resolve A function that resolves the promise. - * @param {Function} reject A function that rejects the promise. - * @param {object} response The response. - */ -module.exports = function settle(resolve, reject, response) { - var validateStatus = response.config.validateStatus; - if (!response.status || !validateStatus || validateStatus(response.status)) { - resolve(response); - } else { - reject(createError( - 'Request failed with status code ' + response.status, - response.config, - null, - response.request, - response - )); - } -}; + empty () { + this.heap = []; + return this; + } + percUp(index) { + let p; -/***/ }), + while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; -/***/ 19812: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + index = p; + } + } -"use strict"; + percDown(index) { + let l; + while ((l=leftChi(index)) < this.heap.length) { + if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { + l = l+1; + } -var utils = __nccwpck_require__(20328); + if (smaller(this.heap[index], this.heap[l])) { + break; + } -/** - * Transform the data for a request or a response - * - * @param {Object|String} data The data to be transformed - * @param {Array} headers The headers for the request or response - * @param {Array|Function} fns A single function or Array of functions - * @returns {*} The resulting transformed data - */ -module.exports = function transformData(data, headers, fns) { - /*eslint no-param-reassign:0*/ - utils.forEach(fns, function transform(fn) { - data = fn(data, headers); - }); + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; - return data; -}; + index = l; + } + } + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length-1); + } -/***/ }), + unshift(node) { + return this.heap.push(node); + } -/***/ 98190: 
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + shift() { + let [top] = this.heap; -"use strict"; + this.heap[0] = this.heap[this.heap.length-1]; + this.heap.pop(); + this.percDown(0); + return top; + } -var utils = __nccwpck_require__(20328); -var normalizeHeaderName = __nccwpck_require__(36240); + toArray() { + return [...this]; + } -var DEFAULT_CONTENT_TYPE = { - 'Content-Type': 'application/x-www-form-urlencoded' -}; + *[Symbol.iterator] () { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } -function setContentTypeIfUnset(headers, value) { - if (!utils.isUndefined(headers) && utils.isUndefined(headers['Content-Type'])) { - headers['Content-Type'] = value; - } -} + remove (testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } -function getDefaultAdapter() { - var adapter; - if (typeof XMLHttpRequest !== 'undefined') { - // For browsers use XHR adapter - adapter = __nccwpck_require__(3454); - } else if (typeof process !== 'undefined' && Object.prototype.toString.call(process) === '[object process]') { - // For node use HTTP adapter - adapter = __nccwpck_require__(68104); - } - return adapter; -} + this.heap.splice(j); -var defaults = { - adapter: getDefaultAdapter(), + for (let i = parent(this.heap.length-1); i >= 0; i--) { + this.percDown(i); + } - transformRequest: [function transformRequest(data, headers) { - normalizeHeaderName(headers, 'Accept'); - normalizeHeaderName(headers, 'Content-Type'); - if (utils.isFormData(data) || - utils.isArrayBuffer(data) || - utils.isBuffer(data) || - utils.isStream(data) || - utils.isFile(data) || - utils.isBlob(data) - ) { - return data; - } - if (utils.isArrayBufferView(data)) { - return data.buffer; - } - if (utils.isURLSearchParams(data)) { - setContentTypeIfUnset(headers, 'application/x-www-form-urlencoded;charset=utf-8'); - return data.toString(); + return this; + } } - if (utils.isObject(data)) { - setContentTypeIfUnset(headers, 'application/json;charset=utf-8'); - return JSON.stringify(data); + + function leftChi(i) { + return (i<<1)+1; } - return data; - }], - transformResponse: [function transformResponse(data) { - /*eslint no-param-reassign:0*/ - if (typeof data === 'string') { - try { - data = JSON.parse(data); - } catch (e) { /* Ignore */ } + function parent(i) { + return ((i+1)>>1)-1; } - return data; - }], - /** - * A timeout in milliseconds to abort a request. If set to 0 (default) a - * timeout is not created. - */ - timeout: 0, + function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } + else { + return x.pushCount < y.pushCount; + } + } - xsrfCookieName: 'XSRF-TOKEN', - xsrfHeaderName: 'X-XSRF-TOKEN', + /** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. + * + * @name priorityQueue + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. 
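+ * @example
+ *
+ * // Minimal usage sketch (task names are made up): tasks pushed with a
+ * // lower numeric priority are handed to the worker first.
+ * var pq = async.priorityQueue(function(task, callback) {
+ *     console.log('processing ' + task.name);
+ *     callback();
+ * }, 1);
+ *
+ * pq.push({name: 'routine check'}, 2);
+ * pq.push({name: 'urgent fix'}, 1, function(err) {
+ *     console.log('finished the urgent task');
+ * });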
+ * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are two + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. + * * The `unshift` method was removed. + */ + function priorityQueue(worker, concurrency) { + // Start with a normal queue + var q = queue$1(worker, concurrency); - maxContentLength: -1, - maxBodyLength: -1, + q._tasks = new Heap(); - validateStatus: function validateStatus(status) { - return status >= 200 && status < 300; - } -}; + // Override push to accept second parameter representing priority + q.push = function(data, priority = 0, callback = () => {}) { + if (typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + if (!Array.isArray(data)) { + data = [data]; + } + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + return setImmediate$1(() => q.drain()); + } -defaults.headers = { - common: { - 'Accept': 'application/json, text/plain, */*' - } -}; + for (var i = 0, l = data.length; i < l; i++) { + var item = { + data: data[i], + priority, + callback + }; -utils.forEach(['delete', 'get', 'head'], function forEachMethodNoData(method) { - defaults.headers[method] = {}; -}); + q._tasks.push(item); + } -utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { - defaults.headers[method] = utils.merge(DEFAULT_CONTENT_TYPE); -}); + setImmediate$1(q.process); + }; -module.exports = defaults; + // Remove unshift function + delete q.unshift; + return q; + } -/***/ }), + /** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). + * @returns undefined + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ + function race(tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + wrapAsync(tasks[i])(callback); + } + } -/***/ 77065: -/***/ ((module) => { + var race$1 = awaitify(race, 2); -"use strict"; + /** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. 
+ * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee complete with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function reduceRight (array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return reduce$1(reversed, memo, iteratee, callback); + } + /** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ + function reflect(fn) { + var _fn = wrapAsync(fn); + return initialParams(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0){ + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); -module.exports = function bind(fn, thisArg) { - return function wrap() { - var args = new Array(arguments.length); - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i]; + return _fn.apply(this, args); + }); } - return fn.apply(thisArg, args); - }; -}; + /** + * A helper function that wraps an array or an object of functions with `reflect`. + * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... 
+ * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ + function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(reflect); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = reflect.call(this, tasks[key]); + }); + } + return results; + } -/***/ }), + function reject(eachfn, arr, _iteratee, callback) { + const iteratee = wrapAsync(_iteratee); + return _filter(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); + }); + }, callback); + } -/***/ 30646: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * async.reject(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, results) { + * // results now equals an array of missing files + * createFiles(results); + * }); + */ + function reject$1 (coll, iteratee, callback) { + return reject(eachOf$1, coll, iteratee, callback) + } + var reject$2 = awaitify(reject$1, 3); -"use strict"; + /** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
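+ * @example
+ *
+ * // Sketch (hypothetical paths), following the reject example above:
+ * // values that pass the fs.access test are dropped, with at most 2
+ * // paths checked at a time, so results holds the missing files.
+ * async.rejectLimit(['file1','file2','file3'], 2, function(filePath, callback) {
+ *     fs.access(filePath, function(err) {
+ *         callback(null, !err)
+ *     });
+ * }, function(err, results) {
+ *     // results now equals an array of missing files
+ * });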
+ * @returns {Promise} a promise, if no callback is passed + */ + function rejectLimit (coll, limit, iteratee, callback) { + return reject(eachOfLimit(limit), coll, iteratee, callback) + } + var rejectLimit$1 = awaitify(rejectLimit, 4); + /** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function rejectSeries (coll, iteratee, callback) { + return reject(eachOfSeries$1, coll, iteratee, callback) + } + var rejectSeries$1 = awaitify(rejectSeries, 3); -var utils = __nccwpck_require__(20328); + function constant$1(value) { + return function () { + return value; + } + } -function encode(val) { - return encodeURIComponent(val). - replace(/%3A/gi, ':'). - replace(/%24/g, '$'). - replace(/%2C/gi, ','). - replace(/%20/g, '+'). - replace(/%5B/gi, '['). - replace(/%5D/gi, ']'); -} + /** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). 
+ * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ + const DEFAULT_TIMES = 5; + const DEFAULT_INTERVAL = 0; -/** - * Build a URL by appending params to the end - * - * @param {string} url The base of the url (e.g., http://www.google.com) - * @param {object} [params] The params to be appended - * @returns {string} The formatted url - */ -module.exports = function buildURL(url, params, paramsSerializer) { - /*eslint no-param-reassign:0*/ - if (!params) { - return url; - } + function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant$1(DEFAULT_INTERVAL) + }; - var serializedParams; - if (paramsSerializer) { - serializedParams = paramsSerializer(params); - } else if (utils.isURLSearchParams(params)) { - serializedParams = params.toString(); - } else { - var parts = []; + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || promiseCallback(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || promiseCallback(); + } - utils.forEach(params, function serialize(val, key) { - if (val === null || typeof val === 'undefined') { - return; - } + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } - if (utils.isArray(val)) { - key = key + '[]'; - } else { - val = [val]; - } + var _task = wrapAsync(task); - utils.forEach(val, function parseValue(v) { - if (utils.isDate(v)) { - v = v.toISOString(); - } else if (utils.isObject(v)) { - v = JSON.stringify(v); + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return + if (err && attempt++ < options.times && + (typeof options.errorFilter != 'function' || + options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); } - 
parts.push(encode(key) + '=' + encode(v)); - }); - }); - - serializedParams = parts.join('&'); - } - if (serializedParams) { - var hashmarkIndex = url.indexOf('#'); - if (hashmarkIndex !== -1) { - url = url.slice(0, hashmarkIndex); + retryAttempt(); + return callback[PROMISE_SYMBOL] } - url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams; - } - - return url; -}; + function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + acc.intervalFunc = typeof t.interval === 'function' ? + t.interval : + constant$1(+t.interval || DEFAULT_INTERVAL); -/***/ }), + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } + } -/***/ 57189: -/***/ ((module) => { - -"use strict"; - - -/** - * Creates a new URL by combining the specified URLs - * - * @param {string} baseURL The base URL - * @param {string} relativeURL The relative URL - * @returns {string} The combined URL - */ -module.exports = function combineURLs(baseURL, relativeURL) { - return relativeURL - ? baseURL.replace(/\/+$/, '') + '/' + relativeURL.replace(/^\/+/, '') - : baseURL; -}; - - -/***/ }), - -/***/ 21545: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var utils = __nccwpck_require__(20328); - -module.exports = ( - utils.isStandardBrowserEnv() ? - - // Standard browser envs support document.cookie - (function standardBrowserEnv() { - return { - write: function write(name, value, expires, path, domain, secure) { - var cookie = []; - cookie.push(name + '=' + encodeURIComponent(value)); - - if (utils.isNumber(expires)) { - cookie.push('expires=' + new Date(expires).toGMTString()); - } - - if (utils.isString(path)) { - cookie.push('path=' + path); - } - - if (utils.isString(domain)) { - cookie.push('domain=' + domain); - } - - if (secure === true) { - cookie.push('secure'); - } - - document.cookie = cookie.join('; '); - }, - - read: function read(name) { - var match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)')); - return (match ? decodeURIComponent(match[3]) : null); - }, - - remove: function remove(name) { - this.write(name, '', Date.now() - 86400000); + /** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. 
+ * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ + function retryable (opts, task) { + if (!task) { + task = opts; + opts = null; } - }; - })() : - - // Non standard browser env (web workers, react-native) lack needed support. - (function nonStandardBrowserEnv() { - return { - write: function write() {}, - read: function read() { return null; }, - remove: function remove() {} - }; - })() -); - - -/***/ }), - -/***/ 41301: -/***/ ((module) => { - -"use strict"; - - -/** - * Determines whether the specified URL is absolute - * - * @param {string} url The URL to test - * @returns {boolean} True if the specified URL is absolute, otherwise false - */ -module.exports = function isAbsoluteURL(url) { - // A URL is considered absolute if it begins with "://" or "//" (protocol-relative URL). - // RFC 3986 defines scheme name as a sequence of characters beginning with a letter and followed - // by any combination of letters, digits, plus, period, or hyphen. - return /^([a-z][a-z\d\+\-\.]*:)?\/\//i.test(url); -}; - - -/***/ }), - -/***/ 60650: -/***/ ((module) => { - -"use strict"; - - -/** - * Determines whether the payload is an error thrown by Axios - * - * @param {*} payload The value to test - * @returns {boolean} True if the payload is an error thrown by Axios, otherwise false - */ -module.exports = function isAxiosError(payload) { - return (typeof payload === 'object') && (payload.isAxiosError === true); -}; - - -/***/ }), + let arity = (opts && opts.arity) || task.length; + if (isAsync(task)) { + arity += 1; + } + var _task = wrapAsync(task); + return initialParams((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = promiseCallback(); + } + function taskFn(cb) { + _task(...args, cb); + } -/***/ 33608: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (opts) retry(opts, taskFn, callback); + else retry(taskFn, callback); -"use strict"; + return callback[PROMISE_SYMBOL] + }); + } + /** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. 
+ * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). + * @return {Promise} a promise, if no callback is passed + * @example + * async.series([ + * function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }, + * function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * } + * ], + * // optional callback + * function(err, results) { + * // results is now equal to ['one', 'two'] + * }); + * + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback){ + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * // results is now equal to: {one: 1, two: 2} + * }); + */ + function series(tasks, callback) { + return parallel(eachOfSeries$1, tasks, callback); + } -var utils = __nccwpck_require__(20328); + /** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * async.some(['file1','file2','file3'], function(filePath, callback) { + * fs.access(filePath, function(err) { + * callback(null, !err) + * }); + * }, function(err, result) { + * // if result is true then at least one of the files exists + * }); + */ + function some(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) + } + var some$1 = awaitify(some, 3); -module.exports = ( - utils.isStandardBrowserEnv() ? + /** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
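+ * @example
+ *
+ * // Sketch (hypothetical paths), mirroring the some example above:
+ * // probe the paths no more than 2 at a time; the main callback fires
+ * // as soon as any of them turns out to exist.
+ * async.someLimit(['file1','file2','file3'], 2, function(filePath, callback) {
+ *     fs.access(filePath, function(err) {
+ *         callback(null, !err)
+ *     });
+ * }, function(err, result) {
+ *     // if result is true then at least one of the files exists
+ * });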
+ * @returns {Promise} a promise, if no callback provided + */ + function someLimit(coll, limit, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfLimit(limit), coll, iteratee, callback) + } + var someLimit$1 = awaitify(someLimit, 4); - // Standard browser envs have full support of the APIs needed to test - // whether the request URL is of the same origin as current location. - (function standardBrowserEnv() { - var msie = /(msie|trident)/i.test(navigator.userAgent); - var urlParsingNode = document.createElement('a'); - var originURL; + /** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function someSeries(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) + } + var someSeries$1 = awaitify(someSeries, 3); - /** - * Parse a URL to discover it's components - * - * @param {String} url The URL to be parsed - * @returns {Object} - */ - function resolveURL(url) { - var href = url; + /** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback passed + * @example + * + * async.sortBy(['file1','file2','file3'], function(file, callback) { + * fs.stat(file, function(err, stats) { + * callback(err, stats.mtime); + * }); + * }, function(err, results) { + * // results is now the original array of files sorted by + * // modified date + * }); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy([1,9,3,5], function(x, callback) { + * callback(null, x); + * }, function(err,result) { + * // result callback + * }); + * + * // descending order + * async.sortBy([1,9,3,5], function(x, callback) { + * callback(null, x*-1); //<- x*-1 instead of x, turns the order around + * }, function(err,result) { + * // result callback + * }); + */ + function sortBy (coll, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return map$1(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, {value: x, criteria}); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); - if (msie) { - // IE needs attribute set twice to normalize properties - urlParsingNode.setAttribute('href', href); - href = urlParsingNode.href; + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 1 : 0; } + } + var sortBy$1 = awaitify(sortBy, 3); - urlParsingNode.setAttribute('href', href); - - // urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils - return { - href: urlParsingNode.href, - protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '', - host: urlParsingNode.host, - search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '', - hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '', - hostname: urlParsingNode.hostname, - port: urlParsingNode.port, - pathname: (urlParsingNode.pathname.charAt(0) === '/') ? - urlParsingNode.pathname : - '/' + urlParsingNode.pathname - }; - } - - originURL = resolveURL(window.location.href); - - /** - * Determine if a URL shares the same origin as the current location - * - * @param {String} requestURL The URL to test - * @returns {boolean} True if URL shares the same origin, otherwise false - */ - return function isURLSameOrigin(requestURL) { - var parsed = (utils.isString(requestURL)) ? resolveURL(requestURL) : requestURL; - return (parsed.protocol === originURL.protocol && - parsed.host === originURL.host); - }; - })() : + /** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. 
+ * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... + * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ + function timeout(asyncFn, milliseconds, info) { + var fn = wrapAsync(asyncFn); - // Non standard browser envs (web workers, react-native) lack needed support. - (function nonStandardBrowserEnv() { - return function isURLSameOrigin() { - return true; - }; - })() -); + return initialParams((args, callback) => { + var timedOut = false; + var timer; + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } -/***/ }), + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); -/***/ 36240: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); + } -"use strict"; + function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; + } + /** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ + function timesLimit(count, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(range(count), limit, _iteratee, callback); + } -var utils = __nccwpck_require__(20328); + /** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. 
+ * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ + function times (n, iteratee, callback) { + return timesLimit(n, Infinity, iteratee, callback) + } -module.exports = function normalizeHeaderName(headers, normalizedName) { - utils.forEach(headers, function processHeader(value, name) { - if (name !== normalizedName && name.toUpperCase() === normalizedName.toUpperCase()) { - headers[normalizedName] = value; - delete headers[name]; + /** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ + function timesSeries (n, iteratee, callback) { + return timesLimit(n, 1, iteratee, callback) } - }); -}; + /** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * async.transform([1,2,3], function(acc, item, index, callback) { + * // pointless async: + * process.nextTick(function() { + * acc[index] = item * 2 + * callback(null) + * }); + * }, function(err, result) { + * // result is now equal to [2, 4, 6] + * }); + * + * @example + * + * async.transform({a: 1, b: 2, c: 3}, function (obj, val, key, callback) { + * setImmediate(function () { + * obj[key] = val * 2; + * callback(); + * }) + * }, function (err, result) { + * // result is equal to {a: 2, b: 4, c: 6} + * }) + */ + function transform (coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? 
[] : {}; + } + callback = once(callback || promiseCallback()); + var _iteratee = wrapAsync(iteratee); -/***/ }), + eachOf$1(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[PROMISE_SYMBOL] + } -/***/ 86455: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ + function tryEach(tasks, callback) { + var error = null; + var result; + return eachSeries$1(tasks, (task, taskCb) => { + wrapAsync(task)((err, ...args) => { + if (err === false) return taskCb(err); -"use strict"; + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); + } + var tryEach$1 = awaitify(tryEach); -var utils = __nccwpck_require__(20328); + /** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. + * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ + function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; + } -// Headers whose duplicates are ignored by node -// c.f. https://nodejs.org/api/http.html#http_message_headers -var ignoreDuplicateOf = [ - 'age', 'authorization', 'content-length', 'content-type', 'etag', - 'expires', 'from', 'host', 'if-modified-since', 'if-unmodified-since', - 'last-modified', 'location', 'max-forwards', 'proxy-authorization', - 'referer', 'retry-after', 'user-agent' -]; + /** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (). 
+ * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ + function whilst(test, iteratee, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results = []; -/** - * Parse headers into an object - * - * ``` - * Date: Wed, 27 Aug 2014 08:58:49 GMT - * Content-Type: application/json - * Connection: keep-alive - * Transfer-Encoding: chunked - * ``` - * - * @param {String} headers Headers needing to be parsed - * @returns {Object} Headers parsed into an object - */ -module.exports = function parseHeaders(headers) { - var parsed = {}; - var key; - var val; - var i; + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } - if (!headers) { return parsed; } + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } - utils.forEach(headers.split('\n'), function parser(line) { - i = line.indexOf(':'); - key = utils.trim(line.substr(0, i)).toLowerCase(); - val = utils.trim(line.substr(i + 1)); + return _test(check); + } + var whilst$1 = awaitify(whilst, 3); - if (key) { - if (parsed[key] && ignoreDuplicateOf.indexOf(key) >= 0) { - return; - } - if (key === 'set-cookie') { - parsed[key] = (parsed[key] ? parsed[key] : []).concat([val]); - } else { - parsed[key] = parsed[key] ? parsed[key] + ', ' + val : val; - } + /** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(page, cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ + function until(test, iteratee, callback) { + const _test = wrapAsync(test); + return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); } - }); - return parsed; -}; + /** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). + * @returns undefined + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ + function waterfall (tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + function nextTask(args) { + var task = wrapAsync(tasks[taskIndex++]); + task(...args, onlyOnce(next)); + } -/***/ }), + function next(err, ...args) { + if (err === false) return + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } -/***/ 74850: -/***/ ((module) => { + nextTask([]); + } -"use strict"; + var waterfall$1 = awaitify(waterfall); + /** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. 
The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. + * + * @typedef {Function} AsyncFunction + * @static + */ -/** - * Syntactic sugar for invoking a function and expanding an array for arguments. - * - * Common use case would be to use `Function.prototype.apply`. - * - * ```js - * function f(x, y, z) {} - * var args = [1, 2, 3]; - * f.apply(null, args); - * ``` - * - * With `spread` this example can be re-written. 
- * - * ```js - * spread(function(x, y, z) {})([1, 2, 3]); - * ``` - * - * @param {Function} callback - * @returns {Function} - */ -module.exports = function spread(callback) { - return function wrap(arr) { - return callback.apply(null, arr); - }; -}; + var index = { + apply, + applyEach: applyEach$1, + applyEachSeries, + asyncify, + auto, + autoInject, + cargo, + cargoQueue: cargo$1, + compose, + concat: concat$1, + concatLimit: concatLimit$1, + concatSeries: concatSeries$1, + constant, + detect: detect$1, + detectLimit: detectLimit$1, + detectSeries: detectSeries$1, + dir, + doUntil, + doWhilst: doWhilst$1, + each, + eachLimit: eachLimit$2, + eachOf: eachOf$1, + eachOfLimit: eachOfLimit$2, + eachOfSeries: eachOfSeries$1, + eachSeries: eachSeries$1, + ensureAsync, + every: every$1, + everyLimit: everyLimit$1, + everySeries: everySeries$1, + filter: filter$1, + filterLimit: filterLimit$1, + filterSeries: filterSeries$1, + forever: forever$1, + groupBy, + groupByLimit: groupByLimit$1, + groupBySeries, + log, + map: map$1, + mapLimit: mapLimit$1, + mapSeries: mapSeries$1, + mapValues, + mapValuesLimit: mapValuesLimit$1, + mapValuesSeries, + memoize, + nextTick, + parallel: parallel$1, + parallelLimit, + priorityQueue, + queue: queue$1, + race: race$1, + reduce: reduce$1, + reduceRight, + reflect, + reflectAll, + reject: reject$2, + rejectLimit: rejectLimit$1, + rejectSeries: rejectSeries$1, + retry, + retryable, + seq, + series, + setImmediate: setImmediate$1, + some: some$1, + someLimit: someLimit$1, + someSeries: someSeries$1, + sortBy: sortBy$1, + timeout, + times, + timesLimit, + timesSeries, + transform, + tryEach: tryEach$1, + unmemoize, + until, + waterfall: waterfall$1, + whilst: whilst$1, + + // aliases + all: every$1, + allLimit: everyLimit$1, + allSeries: everySeries$1, + any: some$1, + anyLimit: someLimit$1, + anySeries: someSeries$1, + find: detect$1, + findLimit: detectLimit$1, + findSeries: detectSeries$1, + flatMap: concat$1, + flatMapLimit: concatLimit$1, + flatMapSeries: concatSeries$1, + forEach: each, + forEachSeries: eachSeries$1, + forEachLimit: eachLimit$2, + forEachOf: eachOf$1, + forEachOfSeries: eachOfSeries$1, + forEachOfLimit: eachOfLimit$2, + inject: reduce$1, + foldl: reduce$1, + foldr: reduceRight, + select: filter$1, + selectLimit: filterLimit$1, + selectSeries: filterSeries$1, + wrapSync: asyncify, + during: whilst$1, + doDuring: doWhilst$1 + }; + + exports.default = index; + exports.apply = apply; + exports.applyEach = applyEach$1; + exports.applyEachSeries = applyEachSeries; + exports.asyncify = asyncify; + exports.auto = auto; + exports.autoInject = autoInject; + exports.cargo = cargo; + exports.cargoQueue = cargo$1; + exports.compose = compose; + exports.concat = concat$1; + exports.concatLimit = concatLimit$1; + exports.concatSeries = concatSeries$1; + exports.constant = constant; + exports.detect = detect$1; + exports.detectLimit = detectLimit$1; + exports.detectSeries = detectSeries$1; + exports.dir = dir; + exports.doUntil = doUntil; + exports.doWhilst = doWhilst$1; + exports.each = each; + exports.eachLimit = eachLimit$2; + exports.eachOf = eachOf$1; + exports.eachOfLimit = eachOfLimit$2; + exports.eachOfSeries = eachOfSeries$1; + exports.eachSeries = eachSeries$1; + exports.ensureAsync = ensureAsync; + exports.every = every$1; + exports.everyLimit = everyLimit$1; + exports.everySeries = everySeries$1; + exports.filter = filter$1; + exports.filterLimit = filterLimit$1; + exports.filterSeries = filterSeries$1; + exports.forever = forever$1; + 
exports.groupBy = groupBy; + exports.groupByLimit = groupByLimit$1; + exports.groupBySeries = groupBySeries; + exports.log = log; + exports.map = map$1; + exports.mapLimit = mapLimit$1; + exports.mapSeries = mapSeries$1; + exports.mapValues = mapValues; + exports.mapValuesLimit = mapValuesLimit$1; + exports.mapValuesSeries = mapValuesSeries; + exports.memoize = memoize; + exports.nextTick = nextTick; + exports.parallel = parallel$1; + exports.parallelLimit = parallelLimit; + exports.priorityQueue = priorityQueue; + exports.queue = queue$1; + exports.race = race$1; + exports.reduce = reduce$1; + exports.reduceRight = reduceRight; + exports.reflect = reflect; + exports.reflectAll = reflectAll; + exports.reject = reject$2; + exports.rejectLimit = rejectLimit$1; + exports.rejectSeries = rejectSeries$1; + exports.retry = retry; + exports.retryable = retryable; + exports.seq = seq; + exports.series = series; + exports.setImmediate = setImmediate$1; + exports.some = some$1; + exports.someLimit = someLimit$1; + exports.someSeries = someSeries$1; + exports.sortBy = sortBy$1; + exports.timeout = timeout; + exports.times = times; + exports.timesLimit = timesLimit; + exports.timesSeries = timesSeries; + exports.transform = transform; + exports.tryEach = tryEach$1; + exports.unmemoize = unmemoize; + exports.until = until; + exports.waterfall = waterfall$1; + exports.whilst = whilst$1; + exports.all = every$1; + exports.allLimit = everyLimit$1; + exports.allSeries = everySeries$1; + exports.any = some$1; + exports.anyLimit = someLimit$1; + exports.anySeries = someSeries$1; + exports.find = detect$1; + exports.findLimit = detectLimit$1; + exports.findSeries = detectSeries$1; + exports.flatMap = concat$1; + exports.flatMapLimit = concatLimit$1; + exports.flatMapSeries = concatSeries$1; + exports.forEach = each; + exports.forEachSeries = eachSeries$1; + exports.forEachLimit = eachLimit$2; + exports.forEachOf = eachOf$1; + exports.forEachOfSeries = eachOfSeries$1; + exports.forEachOfLimit = eachOfLimit$2; + exports.inject = reduce$1; + exports.foldl = reduce$1; + exports.foldr = reduceRight; + exports.select = filter$1; + exports.selectLimit = filterLimit$1; + exports.selectSeries = filterSeries$1; + exports.wrapSync = asyncify; + exports.during = whilst$1; + exports.doDuring = doWhilst$1; + + Object.defineProperty(exports, '__esModule', { value: true }); + +}))); /***/ }), -/***/ 20328: +/***/ 14812: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - +module.exports = +{ + parallel : __nccwpck_require__(8210), + serial : __nccwpck_require__(50445), + serialOrdered : __nccwpck_require__(3578) +}; -var bind = __nccwpck_require__(77065); -/*global toString:true*/ +/***/ }), -// utils is a library of generic helper functions non-specific to axios +/***/ 1700: +/***/ ((module) => { -var toString = Object.prototype.toString; +// API +module.exports = abort; /** - * Determine if a value is an Array + * Aborts leftover active jobs * - * @param {Object} val The value to test - * @returns {boolean} True if value is an Array, otherwise false + * @param {object} state - current state object */ -function isArray(val) { - return toString.call(val) === '[object Array]'; +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); + + // reset leftover jobs + state.jobs = {}; } /** - * Determine if a value is undefined + * Cleans up leftover job by invoking abort function for the provided job id * - * @param {Object} val The value to test - * @returns {boolean} True if 
the value is undefined, otherwise false + * @this state + * @param {string|number} key - job id to abort */ -function isUndefined(val) { - return typeof val === 'undefined'; +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } } + +/***/ }), + +/***/ 72794: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var defer = __nccwpck_require__(15295); + +// API +module.exports = async; + /** - * Determine if a value is a Buffer + * Runs provided callback asynchronously + * even if callback itself is not * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Buffer, otherwise false + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback */ -function isBuffer(val) { - return val !== null && !isUndefined(val) && val.constructor !== null && !isUndefined(val.constructor) - && typeof val.constructor.isBuffer === 'function' && val.constructor.isBuffer(val); +function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); + } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; } + +/***/ }), + +/***/ 15295: +/***/ ((module) => { + +module.exports = defer; + /** - * Determine if a value is an ArrayBuffer + * Runs provided function on next iteration of the event loop * - * @param {Object} val The value to test - * @returns {boolean} True if value is an ArrayBuffer, otherwise false + * @param {function} fn - function to run */ -function isArrayBuffer(val) { - return toString.call(val) === '[object ArrayBuffer]'; +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } } + +/***/ }), + +/***/ 9023: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var async = __nccwpck_require__(72794) + , abort = __nccwpck_require__(1700) + ; + +// API +module.exports = iterate; + /** - * Determine if a value is a FormData + * Iterates over each job object * - * @param {Object} val The value to test - * @returns {boolean} True if value is an FormData, otherwise false + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed */ -function isFormData(val) { - return (typeof FormData !== 'undefined') && (val instanceof FormData); +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? 
state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } + + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); } /** - * Determine if a value is a view on an ArrayBuffer + * Runs iterator over provided job element * - * @param {Object} val The value to test - * @returns {boolean} True if value is a view on an ArrayBuffer, otherwise false + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else */ -function isArrayBufferView(val) { - var result; - if ((typeof ArrayBuffer !== 'undefined') && (ArrayBuffer.isView)) { - result = ArrayBuffer.isView(val); - } else { - result = (val) && (val.buffer) && (val.buffer instanceof ArrayBuffer); +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); } - return result; -} -/** - * Determine if a value is a String - * - * @param {Object} val The value to test - * @returns {boolean} True if value is a String, otherwise false - */ -function isString(val) { - return typeof val === 'string'; + return aborter; } + +/***/ }), + +/***/ 42474: +/***/ ((module) => { + +// API +module.exports = state; + /** - * Determine if a value is a Number + * Creates initial state object + * for iteration over list * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Number, otherwise false + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object */ -function isNumber(val) { - return typeof val === 'number'; +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } + + return initState; } + +/***/ }), + +/***/ 37942: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var abort = __nccwpck_require__(1700) + , async = __nccwpck_require__(72794) + ; + +// API +module.exports = terminator; + /** - * Determine if a value is an Object + * Terminates jobs in the attached state context * - * @param {Object} val The value to test - * @returns {boolean} True if value is an Object, otherwise false + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination */ -function isObject(val) { - return val !== null && typeof val === 'object'; +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } + + // fast forward iteration index + this.index = this.size; + + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); } + +/***/ }), + +/***/ 8210: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(42474) + , terminator = __nccwpck_require__(37942) + ; + +// Public API +module.exports = parallel; + /** - * Determine if a value is a plain Object + * Runs iterator over provided array elements in parallel * - * @param {Object} val The value to test - * @return {boolean} True if value is a plain Object, otherwise false + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator */ -function isPlainObject(val) { - if (toString.call(val) !== '[object Object]') { - return false; +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; } - var prototype = Object.getPrototypeOf(val); - return prototype === null || prototype === Object.prototype; + return terminator.bind(state, callback); } + +/***/ }), + +/***/ 50445: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var serialOrdered = __nccwpck_require__(3578); + +// Public API +module.exports = serial; + /** - * Determine if a value is a Date + * Runs iterator over provided array elements in series * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Date, otherwise false + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator */ -function isDate(val) { - return toString.call(val) === '[object Date]'; +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); } + +/***/ }), + +/***/ 3578: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(42474) + , terminator = __nccwpck_require__(37942) + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; 
+module.exports.descending = descending; + /** - * Determine if a value is a File + * Runs iterator over provided sorted array elements in series * - * @param {Object} val The value to test - * @returns {boolean} True if value is a File, otherwise false + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator */ -function isFile(val) { - return toString.call(val) === '[object File]'; +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } + + state.index++; + + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); } +/* + * -- Sort methods + */ + /** - * Determine if a value is a Blob + * sort helper to sort array elements in ascending order * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Blob, otherwise false + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result */ -function isBlob(val) { - return toString.call(val) === '[object Blob]'; +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 1 : 0; } /** - * Determine if a value is a Function + * sort helper to sort array elements in descending order * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Function, otherwise false + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result */ -function isFunction(val) { - return toString.call(val) === '[object Function]'; +function descending(a, b) +{ + return -1 * ascending(a, b); } -/** - * Determine if a value is a Stream + +/***/ }), + +/***/ 96342: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/*! + * Copyright 2010 LearnBoost * - * @param {Object} val The value to test - * @returns {boolean} True if value is a Stream, otherwise false + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ -function isStream(val) { - return isObject(val) && isFunction(val.pipe); -} /** - * Determine if a value is a URLSearchParams object + * Module dependencies. + */ + +var crypto = __nccwpck_require__(76417) + , parse = __nccwpck_require__(78835).parse + ; + +/** + * Valid keys. 
+ */ + +var keys = + [ 'acl' + , 'location' + , 'logging' + , 'notification' + , 'partNumber' + , 'policy' + , 'requestPayment' + , 'torrent' + , 'uploadId' + , 'uploads' + , 'versionId' + , 'versioning' + , 'versions' + , 'website' + ] + +/** + * Return an "Authorization" header value with the given `options` + * in the form of "AWS :" * - * @param {Object} val The value to test - * @returns {boolean} True if value is a URLSearchParams object, otherwise false + * @param {Object} options + * @return {String} + * @api private */ -function isURLSearchParams(val) { - return typeof URLSearchParams !== 'undefined' && val instanceof URLSearchParams; + +function authorization (options) { + return 'AWS ' + options.key + ':' + sign(options) } +module.exports = authorization +module.exports.authorization = authorization + /** - * Trim excess whitespace off the beginning and end of a string + * Simple HMAC-SHA1 Wrapper * - * @param {String} str The String to trim - * @returns {String} The String freed of excess whitespace + * @param {Object} options + * @return {String} + * @api private + */ + +function hmacSha1 (options) { + return crypto.createHmac('sha1', options.secret).update(options.message).digest('base64') +} + +module.exports.hmacSha1 = hmacSha1 + +/** + * Create a base64 sha1 HMAC for `options`. + * + * @param {Object} options + * @return {String} + * @api private */ -function trim(str) { - return str.replace(/^\s*/, '').replace(/\s*$/, ''); + +function sign (options) { + options.message = stringToSign(options) + return hmacSha1(options) } +module.exports.sign = sign /** - * Determine if we're running in a standard browser environment - * - * This allows axios to run in a web worker, and react-native. - * Both environments support XMLHttpRequest, but not fully standard globals. - * - * web workers: - * typeof window -> undefined - * typeof document -> undefined + * Create a base64 sha1 HMAC for `options`. * - * react-native: - * navigator.product -> 'ReactNative' - * nativescript - * navigator.product -> 'NativeScript' or 'NS' + * Specifically to be used with S3 presigned URLs + * + * @param {Object} options + * @return {String} + * @api private */ -function isStandardBrowserEnv() { - if (typeof navigator !== 'undefined' && (navigator.product === 'ReactNative' || - navigator.product === 'NativeScript' || - navigator.product === 'NS')) { - return false; - } - return ( - typeof window !== 'undefined' && - typeof document !== 'undefined' - ); + +function signQuery (options) { + options.message = queryStringToSign(options) + return hmacSha1(options) } +module.exports.signQuery= signQuery /** - * Iterate over an Array or an Object invoking a function for each item. - * - * If `obj` is an Array callback will be called passing - * the value, index, and complete array for each item. + * Return a string for sign() with the given `options`. * - * If 'obj' is an Object callback will be called passing - * the value, key, and complete object for each property. 
+ * Spec: + * + * \n + * \n + * \n + * \n + * [headers\n] + * * - * @param {Object|Array} obj The object to iterate - * @param {Function} fn The callback to invoke for each item + * @param {Object} options + * @return {String} + * @api private */ -function forEach(obj, fn) { - // Don't bother if no value provided - if (obj === null || typeof obj === 'undefined') { - return; - } - - // Force an array if not already something iterable - if (typeof obj !== 'object') { - /*eslint no-param-reassign:0*/ - obj = [obj]; - } - if (isArray(obj)) { - // Iterate over array values - for (var i = 0, l = obj.length; i < l; i++) { - fn.call(null, obj[i], i, obj); - } - } else { - // Iterate over object keys - for (var key in obj) { - if (Object.prototype.hasOwnProperty.call(obj, key)) { - fn.call(null, obj[key], key, obj); - } - } - } +function stringToSign (options) { + var headers = options.amazonHeaders || '' + if (headers) headers += '\n' + var r = + [ options.verb + , options.md5 + , options.contentType + , options.date ? options.date.toUTCString() : '' + , headers + options.resource + ] + return r.join('\n') } +module.exports.stringToSign = stringToSign /** - * Accepts varargs expecting each argument to be an object, then - * immutably merges the properties of each object and returns result. - * - * When multiple objects contain the same key the later object in - * the arguments list will take precedence. - * - * Example: + * Return a string for sign() with the given `options`, but is meant exclusively + * for S3 presigned URLs * - * ```js - * var result = merge({foo: 123}, {foo: 456}); - * console.log(result.foo); // outputs 456 - * ``` + * Spec: + * + * \n + * * - * @param {Object} obj1 Object to merge - * @returns {Object} Result of all merge properties + * @param {Object} options + * @return {String} + * @api private */ -function merge(/* obj1, obj2, obj3, ... */) { - var result = {}; - function assignValue(val, key) { - if (isPlainObject(result[key]) && isPlainObject(val)) { - result[key] = merge(result[key], val); - } else if (isPlainObject(val)) { - result[key] = merge({}, val); - } else if (isArray(val)) { - result[key] = val.slice(); - } else { - result[key] = val; - } - } - for (var i = 0, l = arguments.length; i < l; i++) { - forEach(arguments[i], assignValue); - } - return result; +function queryStringToSign (options){ + return 'GET\n\n\n' + options.date + '\n' + options.resource } +module.exports.queryStringToSign = queryStringToSign /** - * Extends object a by mutably adding to it the properties of object b. 
+ * Perform the following: * - * @param {Object} a The object to be extended - * @param {Object} b The object to copy properties from - * @param {Object} thisArg The object to bind function to - * @return {Object} The resulting value of object a + * - ignore non-amazon headers + * - lowercase fields + * - sort lexicographically + * - trim whitespace between ":" + * - join with newline + * + * @param {Object} headers + * @return {String} + * @api private */ -function extend(a, b, thisArg) { - forEach(b, function assignValue(val, key) { - if (thisArg && typeof val === 'function') { - a[key] = bind(val, thisArg); - } else { - a[key] = val; - } - }); - return a; + +function canonicalizeHeaders (headers) { + var buf = [] + , fields = Object.keys(headers) + ; + for (var i = 0, len = fields.length; i < len; ++i) { + var field = fields[i] + , val = headers[field] + , field = field.toLowerCase() + ; + if (0 !== field.indexOf('x-amz')) continue + buf.push(field + ':' + val) + } + return buf.sort().join('\n') } +module.exports.canonicalizeHeaders = canonicalizeHeaders /** - * Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) + * Perform the following: * - * @param {string} content with BOM - * @return {string} content value without BOM + * - ignore non sub-resources + * - sort lexicographically + * + * @param {String} resource + * @return {String} + * @api private */ -function stripBOM(content) { - if (content.charCodeAt(0) === 0xFEFF) { - content = content.slice(1); - } - return content; -} -module.exports = { - isArray: isArray, - isArrayBuffer: isArrayBuffer, - isBuffer: isBuffer, - isFormData: isFormData, - isArrayBufferView: isArrayBufferView, - isString: isString, - isNumber: isNumber, - isObject: isObject, - isPlainObject: isPlainObject, - isUndefined: isUndefined, - isDate: isDate, - isFile: isFile, - isBlob: isBlob, - isFunction: isFunction, - isStream: isStream, - isURLSearchParams: isURLSearchParams, - isStandardBrowserEnv: isStandardBrowserEnv, - forEach: forEach, - merge: merge, - extend: extend, - trim: trim, - stripBOM: stripBOM -}; +function canonicalizeResource (resource) { + var url = parse(resource, true) + , path = url.pathname + , buf = [] + ; + + Object.keys(url.query).forEach(function(key){ + if (!~keys.indexOf(key)) return + var val = '' == url.query[key] ? '' : '=' + encodeURIComponent(url.query[key]) + buf.push(key + val) + }) + + return path + (buf.length ? '?' + buf.sort().join('&') : '') +} +module.exports.canonicalizeResource = canonicalizeResource /***/ }), -/***/ 9417: -/***/ ((module) => { - -"use strict"; +/***/ 16071: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); +var aws4 = exports, + url = __nccwpck_require__(78835), + querystring = __nccwpck_require__(71191), + crypto = __nccwpck_require__(76417), + lru = __nccwpck_require__(74225), + credentialsCache = lru(1000) - var r = range(a, b, str); +// http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; +function hmac(key, string, encoding) { + return crypto.createHmac('sha256', key).update(string, 'utf8').digest(encoding) } -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? 
m[0] : null; +function hash(string, encoding) { + return crypto.createHash('sha256').update(string, 'utf8').digest(encoding) } -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; +// This function assumes the string has already been percent encoded +function encodeRfc3986(urlEncodedString) { + return urlEncodedString.replace(/[!'()*]/g, function(c) { + return '%' + c.charCodeAt(0).toString(16).toUpperCase() + }) +} - if (ai >= 0 && bi > 0) { - begs = []; - left = str.length; +function encodeRfc3986Full(str) { + return encodeRfc3986(encodeURIComponent(str)) +} - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } +// A bit of a combination of: +// https://github.com/aws/aws-sdk-java-v2/blob/dc695de6ab49ad03934e1b02e7263abbd2354be0/core/auth/src/main/java/software/amazon/awssdk/auth/signer/internal/AbstractAws4Signer.java#L59 +// https://github.com/aws/aws-sdk-js/blob/18cb7e5b463b46239f9fdd4a65e2ff8c81831e8f/lib/signers/v4.js#L191-L199 +// https://github.com/mhart/aws4fetch/blob/b3aed16b6f17384cf36ea33bcba3c1e9f3bdfefd/src/main.js#L25-L34 +var HEADERS_TO_IGNORE = { + 'authorization': true, + 'connection': true, + 'x-amzn-trace-id': true, + 'user-agent': true, + 'expect': true, + 'presigned-expires': true, + 'range': true, +} - bi = str.indexOf(b, i + 1); - } +// request: { path | body, [host], [method], [headers], [service], [region] } +// credentials: { accessKeyId, secretAccessKey, [sessionToken] } +function RequestSigner(request, credentials) { - i = ai < bi && ai >= 0 ? ai : bi; - } + if (typeof request === 'string') request = url.parse(request) - if (begs.length) { - result = [ left, right ]; + var headers = request.headers = (request.headers || {}), + hostParts = (!this.service || !this.region) && this.matchHost(request.hostname || request.host || headers.Host || headers.host) + + this.request = request + this.credentials = credentials || this.defaultCredentials() + + this.service = request.service || hostParts[0] || '' + this.region = request.region || hostParts[1] || 'us-east-1' + + // SES uses a different domain from the service name + if (this.service === 'email') this.service = 'ses' + + if (!request.method && request.body) + request.method = 'POST' + + if (!headers.Host && !headers.host) { + headers.Host = request.hostname || request.host || this.createHost() + + // If a port is specified explicitly, use it as is + if (request.port) + headers.Host += ':' + request.port + } + if (!request.hostname && !request.host) + request.hostname = headers.Host || headers.host + + this.isCodeCommitGit = this.service === 'codecommit' && request.method === 'GIT' +} + +RequestSigner.prototype.matchHost = function(host) { + var match = (host || '').match(/([^\.]+)\.(?:([^\.]*)\.)?amazonaws\.com(\.cn)?$/) + var hostParts = (match || []).slice(1, 3) + + // ES's hostParts are sometimes the other way round, if the value that is expected + // to be region equals ‘es’ switch them back + // e.g. 
search-cluster-name-aaaa00aaaa0aaa0aaaaaaa0aaa.us-east-1.es.amazonaws.com + if (hostParts[1] === 'es') + hostParts = hostParts.reverse() + + if (hostParts[1] == 's3') { + hostParts[0] = 's3' + hostParts[1] = 'us-east-1' + } else { + for (var i = 0; i < 2; i++) { + if (/^s3-/.test(hostParts[i])) { + hostParts[1] = hostParts[i].slice(3) + hostParts[0] = 's3' + break + } } } - return result; + return hostParts } +// http://docs.aws.amazon.com/general/latest/gr/rande.html +RequestSigner.prototype.isSingleRegion = function() { + // Special case for S3 and SimpleDB in us-east-1 + if (['s3', 'sdb'].indexOf(this.service) >= 0 && this.region === 'us-east-1') return true -/***/ }), + return ['cloudfront', 'ls', 'route53', 'iam', 'importexport', 'sts'] + .indexOf(this.service) >= 0 +} -/***/ 45447: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +RequestSigner.prototype.createHost = function() { + var region = this.isSingleRegion() ? '' : '.' + this.region, + subdomain = this.service === 'ses' ? 'email' : this.service + return subdomain + region + '.amazonaws.com' +} -"use strict"; +RequestSigner.prototype.prepareRequest = function() { + this.parsePath() + var request = this.request, headers = request.headers, query -var crypto_hash_sha512 = __nccwpck_require__(68729).lowlevel.crypto_hash; + if (request.signQuery) { -/* - * This file is a 1:1 port from the OpenBSD blowfish.c and bcrypt_pbkdf.c. As a - * result, it retains the original copyright and license. The two files are - * under slightly different (but compatible) licenses, and are here combined in - * one file. - * - * Credit for the actual porting work goes to: - * Devi Mandiri - */ + this.parsedPath.query = query = this.parsedPath.query || {} -/* - * The Blowfish portions are under the following license: - * - * Blowfish block cipher for OpenBSD - * Copyright 1997 Niels Provos - * All rights reserved. - * - * Implementation advice by David Mazieres . - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. The name of the author may not be used to endorse or promote products - * derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR - * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. - * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF - * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ + if (this.credentials.sessionToken) + query['X-Amz-Security-Token'] = this.credentials.sessionToken -/* - * The bcrypt_pbkdf portions are under the following license: - * - * Copyright (c) 2013 Ted Unangst - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ + if (this.service === 's3' && !query['X-Amz-Expires']) + query['X-Amz-Expires'] = 86400 -/* - * Performance improvements (Javascript-specific): - * - * Copyright 2016, Joyent Inc - * Author: Alex Wilson - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- */ + if (query['X-Amz-Date']) + this.datetime = query['X-Amz-Date'] + else + query['X-Amz-Date'] = this.getDateTime() -// Ported from OpenBSD bcrypt_pbkdf.c v1.9 + query['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256' + query['X-Amz-Credential'] = this.credentials.accessKeyId + '/' + this.credentialString() + query['X-Amz-SignedHeaders'] = this.signedHeaders() -var BLF_J = 0; + } else { -var Blowfish = function() { - this.S = [ - new Uint32Array([ - 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, - 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, - 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, - 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, - 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, - 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, - 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, - 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, - 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, - 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, - 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, - 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, - 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, - 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, - 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, - 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, - 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, - 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, - 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, - 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, - 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, - 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, - 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, - 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, - 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, - 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, - 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, - 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, - 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, - 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, - 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, - 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, - 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, - 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, - 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, - 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, - 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, - 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, - 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, - 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, - 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, - 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, - 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, - 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, - 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, - 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, - 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, - 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, - 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, - 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, - 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, - 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, - 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, - 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, - 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, - 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, - 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, - 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, - 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, - 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, - 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, - 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, - 
0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, - 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a]), - new Uint32Array([ - 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, - 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, - 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, - 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, - 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, - 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, - 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, - 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, - 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, - 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, - 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, - 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, - 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, - 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, - 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, - 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, - 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, - 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, - 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, - 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, - 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, - 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, - 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, - 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, - 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, - 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, - 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, - 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, - 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, - 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, - 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, - 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, - 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, - 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, - 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, - 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, - 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, - 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, - 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, - 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, - 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, - 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, - 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, - 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, - 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, - 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, - 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, - 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, - 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, - 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, - 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, - 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, - 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, - 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, - 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, - 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, - 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, - 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, - 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, - 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, - 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, - 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, - 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, - 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7]), - new Uint32Array([ - 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, - 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, - 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, - 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, - 
0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, - 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, - 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, - 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, - 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, - 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, - 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, - 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, - 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, - 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, - 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, - 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, - 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, - 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, - 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, - 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, - 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, - 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, - 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, - 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, - 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, - 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, - 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, - 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, - 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, - 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, - 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, - 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, - 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, - 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, - 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, - 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, - 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, - 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, - 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, - 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, - 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, - 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, - 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, - 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, - 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, - 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, - 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, - 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, - 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, - 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, - 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, - 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, - 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, - 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, - 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, - 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, - 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, - 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, - 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, - 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, - 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, - 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, - 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, - 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0]), - new Uint32Array([ - 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, - 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, - 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, - 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, - 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, - 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, - 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, - 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, - 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, - 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, - 0x2826a2f9, 0xa73a3ae1, 
0x4ba99586, 0xef5562e9, - 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, - 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, - 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, - 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, - 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, - 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, - 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, - 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, - 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, - 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, - 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, - 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, - 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, - 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, - 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, - 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, - 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, - 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, - 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, - 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, - 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, - 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, - 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, - 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, - 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, - 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, - 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, - 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, - 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, - 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, - 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, - 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, - 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, - 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, - 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, - 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, - 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, - 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, - 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, - 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, - 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, - 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, - 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, - 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, - 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, - 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, - 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, - 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, - 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, - 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, - 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, - 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, - 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6]) - ]; - this.P = new Uint32Array([ - 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, - 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, - 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, - 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, - 0x9216d5d9, 0x8979fb1b]); -}; + if (!request.doNotModifyHeaders && !this.isCodeCommitGit) { + if (request.body && !headers['Content-Type'] && !headers['content-type']) + headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8' -function F(S, x8, i) { - return (((S[0][x8[i+3]] + - S[1][x8[i+2]]) ^ - S[2][x8[i+1]]) + - S[3][x8[i]]); -}; + if (request.body && !headers['Content-Length'] && !headers['content-length']) + headers['Content-Length'] = Buffer.byteLength(request.body) -Blowfish.prototype.encipher = function(x, x8) { - if (x8 === undefined) { - x8 = new Uint8Array(x.buffer); - if (x.byteOffset !== 0) - x8 = 
x8.subarray(x.byteOffset); - } - x[0] ^= this.P[0]; - for (var i = 1; i < 16; i += 2) { - x[1] ^= F(this.S, x8, 0) ^ this.P[i]; - x[0] ^= F(this.S, x8, 4) ^ this.P[i+1]; + if (this.credentials.sessionToken && !headers['X-Amz-Security-Token'] && !headers['x-amz-security-token']) + headers['X-Amz-Security-Token'] = this.credentials.sessionToken + + if (this.service === 's3' && !headers['X-Amz-Content-Sha256'] && !headers['x-amz-content-sha256']) + headers['X-Amz-Content-Sha256'] = hash(this.request.body || '', 'hex') + + if (headers['X-Amz-Date'] || headers['x-amz-date']) + this.datetime = headers['X-Amz-Date'] || headers['x-amz-date'] + else + headers['X-Amz-Date'] = this.getDateTime() + } + + delete headers.Authorization + delete headers.authorization } - var t = x[0]; - x[0] = x[1] ^ this.P[17]; - x[1] = t; -}; +} -Blowfish.prototype.decipher = function(x) { - var x8 = new Uint8Array(x.buffer); - if (x.byteOffset !== 0) - x8 = x8.subarray(x.byteOffset); - x[0] ^= this.P[17]; - for (var i = 16; i > 0; i -= 2) { - x[1] ^= F(this.S, x8, 0) ^ this.P[i]; - x[0] ^= F(this.S, x8, 4) ^ this.P[i-1]; +RequestSigner.prototype.sign = function() { + if (!this.parsedPath) this.prepareRequest() + + if (this.request.signQuery) { + this.parsedPath.query['X-Amz-Signature'] = this.signature() + } else { + this.request.headers.Authorization = this.authHeader() } - var t = x[0]; - x[0] = x[1] ^ this.P[0]; - x[1] = t; -}; -function stream2word(data, databytes){ - var i, temp = 0; - for (i = 0; i < 4; i++, BLF_J++) { - if (BLF_J >= databytes) BLF_J = 0; - temp = (temp << 8) | data[BLF_J]; + this.request.path = this.formatPath() + + return this.request +} + +RequestSigner.prototype.getDateTime = function() { + if (!this.datetime) { + var headers = this.request.headers, + date = new Date(headers.Date || headers.date || new Date) + + this.datetime = date.toISOString().replace(/[:\-]|\.\d{3}/g, '') + + // Remove the trailing 'Z' on the timestamp string for CodeCommit git access + if (this.isCodeCommitGit) this.datetime = this.datetime.slice(0, -1) } - return temp; -}; + return this.datetime +} -Blowfish.prototype.expand0state = function(key, keybytes) { - var d = new Uint32Array(2), i, k; - var d8 = new Uint8Array(d.buffer); +RequestSigner.prototype.getDate = function() { + return this.getDateTime().substr(0, 8) +} - for (i = 0, BLF_J = 0; i < 18; i++) { - this.P[i] ^= stream2word(key, keybytes); +RequestSigner.prototype.authHeader = function() { + return [ + 'AWS4-HMAC-SHA256 Credential=' + this.credentials.accessKeyId + '/' + this.credentialString(), + 'SignedHeaders=' + this.signedHeaders(), + 'Signature=' + this.signature(), + ].join(', ') +} + +RequestSigner.prototype.signature = function() { + var date = this.getDate(), + cacheKey = [this.credentials.secretAccessKey, date, this.region, this.service].join(), + kDate, kRegion, kService, kCredentials = credentialsCache.get(cacheKey) + if (!kCredentials) { + kDate = hmac('AWS4' + this.credentials.secretAccessKey, date) + kRegion = hmac(kDate, this.region) + kService = hmac(kRegion, this.service) + kCredentials = hmac(kService, 'aws4_request') + credentialsCache.set(cacheKey, kCredentials) } - BLF_J = 0; + return hmac(kCredentials, this.stringToSign(), 'hex') +} - for (i = 0; i < 18; i += 2) { - this.encipher(d, d8); - this.P[i] = d[0]; - this.P[i+1] = d[1]; +RequestSigner.prototype.stringToSign = function() { + return [ + 'AWS4-HMAC-SHA256', + this.getDateTime(), + this.credentialString(), + hash(this.canonicalString(), 'hex'), + ].join('\n') +} + 
+RequestSigner.prototype.canonicalString = function() { + if (!this.parsedPath) this.prepareRequest() + + var pathStr = this.parsedPath.path, + query = this.parsedPath.query, + headers = this.request.headers, + queryStr = '', + normalizePath = this.service !== 's3', + decodePath = this.service === 's3' || this.request.doNotEncodePath, + decodeSlashesInPath = this.service === 's3', + firstValOnly = this.service === 's3', + bodyHash + + if (this.service === 's3' && this.request.signQuery) { + bodyHash = 'UNSIGNED-PAYLOAD' + } else if (this.isCodeCommitGit) { + bodyHash = '' + } else { + bodyHash = headers['X-Amz-Content-Sha256'] || headers['x-amz-content-sha256'] || + hash(this.request.body || '', 'hex') } - for (i = 0; i < 4; i++) { - for (k = 0; k < 256; k += 2) { - this.encipher(d, d8); - this.S[i][k] = d[0]; - this.S[i][k+1] = d[1]; - } + if (query) { + var reducedQuery = Object.keys(query).reduce(function(obj, key) { + if (!key) return obj + obj[encodeRfc3986Full(key)] = !Array.isArray(query[key]) ? query[key] : + (firstValOnly ? query[key][0] : query[key]) + return obj + }, {}) + var encodedQueryPieces = [] + Object.keys(reducedQuery).sort().forEach(function(key) { + if (!Array.isArray(reducedQuery[key])) { + encodedQueryPieces.push(key + '=' + encodeRfc3986Full(reducedQuery[key])) + } else { + reducedQuery[key].map(encodeRfc3986Full).sort() + .forEach(function(val) { encodedQueryPieces.push(key + '=' + val) }) + } + }) + queryStr = encodedQueryPieces.join('&') + } + if (pathStr !== '/') { + if (normalizePath) pathStr = pathStr.replace(/\/{2,}/g, '/') + pathStr = pathStr.split('/').reduce(function(path, piece) { + if (normalizePath && piece === '..') { + path.pop() + } else if (!normalizePath || piece !== '.') { + if (decodePath) piece = decodeURIComponent(piece.replace(/\+/g, ' ')) + path.push(encodeRfc3986Full(piece)) + } + return path + }, []).join('/') + if (pathStr[0] !== '/') pathStr = '/' + pathStr + if (decodeSlashesInPath) pathStr = pathStr.replace(/%2F/g, '/') } -}; -Blowfish.prototype.expandstate = function(data, databytes, key, keybytes) { - var d = new Uint32Array(2), i, k; + return [ + this.request.method || 'GET', + pathStr, + queryStr, + this.canonicalHeaders() + '\n', + this.signedHeaders(), + bodyHash, + ].join('\n') +} - for (i = 0, BLF_J = 0; i < 18; i++) { - this.P[i] ^= stream2word(key, keybytes); +RequestSigner.prototype.canonicalHeaders = function() { + var headers = this.request.headers + function trimAll(header) { + return header.toString().trim().replace(/\s+/g, ' ') } + return Object.keys(headers) + .filter(function(key) { return HEADERS_TO_IGNORE[key.toLowerCase()] == null }) + .sort(function(a, b) { return a.toLowerCase() < b.toLowerCase() ? 
-1 : 1 }) + .map(function(key) { return key.toLowerCase() + ':' + trimAll(headers[key]) }) + .join('\n') +} - for (i = 0, BLF_J = 0; i < 18; i += 2) { - d[0] ^= stream2word(data, databytes); - d[1] ^= stream2word(data, databytes); - this.encipher(d); - this.P[i] = d[0]; - this.P[i+1] = d[1]; +RequestSigner.prototype.signedHeaders = function() { + return Object.keys(this.request.headers) + .map(function(key) { return key.toLowerCase() }) + .filter(function(key) { return HEADERS_TO_IGNORE[key] == null }) + .sort() + .join(';') +} + +RequestSigner.prototype.credentialString = function() { + return [ + this.getDate(), + this.region, + this.service, + 'aws4_request', + ].join('/') +} + +RequestSigner.prototype.defaultCredentials = function() { + var env = process.env + return { + accessKeyId: env.AWS_ACCESS_KEY_ID || env.AWS_ACCESS_KEY, + secretAccessKey: env.AWS_SECRET_ACCESS_KEY || env.AWS_SECRET_KEY, + sessionToken: env.AWS_SESSION_TOKEN, } +} - for (i = 0; i < 4; i++) { - for (k = 0; k < 256; k += 2) { - d[0] ^= stream2word(data, databytes); - d[1] ^= stream2word(data, databytes); - this.encipher(d); - this.S[i][k] = d[0]; - this.S[i][k+1] = d[1]; - } +RequestSigner.prototype.parsePath = function() { + var path = this.request.path || '/' + + // S3 doesn't always encode characters > 127 correctly and + // all services don't encode characters > 255 correctly + // So if there are non-reserved chars (and it's not already all % encoded), just encode them all + if (/[^0-9A-Za-z;,/?:@&=+$\-_.!~*'()#%]/.test(path)) { + path = encodeURI(decodeURI(path)) } - BLF_J = 0; -}; -Blowfish.prototype.enc = function(data, blocks) { - for (var i = 0; i < blocks; i++) { - this.encipher(data.subarray(i*2)); + var queryIx = path.indexOf('?'), + query = null + + if (queryIx >= 0) { + query = querystring.parse(path.slice(queryIx + 1)) + path = path.slice(0, queryIx) } -}; -Blowfish.prototype.dec = function(data, blocks) { - for (var i = 0; i < blocks; i++) { - this.decipher(data.subarray(i*2)); + this.parsedPath = { + path: path, + query: query, } -}; +} -var BCRYPT_BLOCKS = 8, - BCRYPT_HASHSIZE = 32; +RequestSigner.prototype.formatPath = function() { + var path = this.parsedPath.path, + query = this.parsedPath.query -function bcrypt_hash(sha2pass, sha2salt, out) { - var state = new Blowfish(), - cdata = new Uint32Array(BCRYPT_BLOCKS), i, - ciphertext = new Uint8Array([79,120,121,99,104,114,111,109,97,116,105, - 99,66,108,111,119,102,105,115,104,83,119,97,116,68,121,110,97,109, - 105,116,101]); //"OxychromaticBlowfishSwatDynamite" + if (!query) return path - state.expandstate(sha2salt, 64, sha2pass, 64); - for (i = 0; i < 64; i++) { - state.expand0state(sha2salt, 64); - state.expand0state(sha2pass, 64); - } + // Services don't support empty query string keys + if (query[''] != null) delete query[''] - for (i = 0; i < BCRYPT_BLOCKS; i++) - cdata[i] = stream2word(ciphertext, ciphertext.byteLength); - for (i = 0; i < 64; i++) - state.enc(cdata, cdata.byteLength / 8); + return path + '?' 
+ encodeRfc3986(querystring.stringify(query)) +} - for (i = 0; i < BCRYPT_BLOCKS; i++) { - out[4*i+3] = cdata[i] >>> 24; - out[4*i+2] = cdata[i] >>> 16; - out[4*i+1] = cdata[i] >>> 8; - out[4*i+0] = cdata[i]; +aws4.RequestSigner = RequestSigner + +aws4.sign = function(request, credentials) { + return new RequestSigner(request, credentials).sign() +} + + +/***/ }), + +/***/ 74225: +/***/ ((module) => { + +module.exports = function(size) { + return new LruCache(size) +} + +function LruCache(size) { + this.capacity = size | 0 + this.map = Object.create(null) + this.list = new DoublyLinkedList() +} + +LruCache.prototype.get = function(key) { + var node = this.map[key] + if (node == null) return undefined + this.used(node) + return node.val +} + +LruCache.prototype.set = function(key, val) { + var node = this.map[key] + if (node != null) { + node.val = val + } else { + if (!this.capacity) this.prune() + if (!this.capacity) return false + node = new DoublyLinkedNode(key, val) + this.map[key] = node + this.capacity-- } -}; + this.used(node) + return true +} -function bcrypt_pbkdf(pass, passlen, salt, saltlen, key, keylen, rounds) { - var sha2pass = new Uint8Array(64), - sha2salt = new Uint8Array(64), - out = new Uint8Array(BCRYPT_HASHSIZE), - tmpout = new Uint8Array(BCRYPT_HASHSIZE), - countsalt = new Uint8Array(saltlen+4), - i, j, amt, stride, dest, count, - origkeylen = keylen; +LruCache.prototype.used = function(node) { + this.list.moveToFront(node) +} - if (rounds < 1) - return -1; - if (passlen === 0 || saltlen === 0 || keylen === 0 || - keylen > (out.byteLength * out.byteLength) || saltlen > (1<<20)) - return -1; +LruCache.prototype.prune = function() { + var node = this.list.pop() + if (node != null) { + delete this.map[node.key] + this.capacity++ + } +} - stride = Math.floor((keylen + out.byteLength - 1) / out.byteLength); - amt = Math.floor((keylen + stride - 1) / stride); - for (i = 0; i < saltlen; i++) - countsalt[i] = salt[i]; +function DoublyLinkedList() { + this.firstNode = null + this.lastNode = null +} - crypto_hash_sha512(sha2pass, pass, passlen); +DoublyLinkedList.prototype.moveToFront = function(node) { + if (this.firstNode == node) return - for (count = 1; keylen > 0; count++) { - countsalt[saltlen+0] = count >>> 24; - countsalt[saltlen+1] = count >>> 16; - countsalt[saltlen+2] = count >>> 8; - countsalt[saltlen+3] = count; + this.remove(node) - crypto_hash_sha512(sha2salt, countsalt, saltlen + 4); - bcrypt_hash(sha2pass, sha2salt, tmpout); - for (i = out.byteLength; i--;) - out[i] = tmpout[i]; + if (this.firstNode == null) { + this.firstNode = node + this.lastNode = node + node.prev = null + node.next = null + } else { + node.prev = null + node.next = this.firstNode + node.next.prev = node + this.firstNode = node + } +} - for (i = 1; i < rounds; i++) { - crypto_hash_sha512(sha2salt, tmpout, tmpout.byteLength); - bcrypt_hash(sha2pass, sha2salt, tmpout); - for (j = 0; j < out.byteLength; j++) - out[j] ^= tmpout[j]; - } +DoublyLinkedList.prototype.pop = function() { + var lastNode = this.lastNode + if (lastNode != null) { + this.remove(lastNode) + } + return lastNode +} - amt = Math.min(amt, keylen); - for (i = 0; i < amt; i++) { - dest = i * stride + (count - 1); - if (dest >= origkeylen) - break; - key[dest] = out[i]; - } - keylen -= i; +DoublyLinkedList.prototype.remove = function(node) { + if (this.firstNode == node) { + this.firstNode = node.next + } else if (node.prev != null) { + node.prev.next = node.next + } + if (this.lastNode == node) { + this.lastNode = node.prev + } 
else if (node.next != null) { + node.next.prev = node.prev } +} - return 0; -}; -module.exports = { - BLOCKS: BCRYPT_BLOCKS, - HASHSIZE: BCRYPT_HASHSIZE, - hash: bcrypt_hash, - pbkdf: bcrypt_pbkdf -}; +function DoublyLinkedNode(key, val) { + this.key = key + this.val = val + this.prev = null + this.next = null +} /***/ }), -/***/ 87558: -/***/ (function(module) { - -;(function (globalObject) { - 'use strict'; - -/* - * bignumber.js v9.0.0 - * A JavaScript library for arbitrary-precision arithmetic. - * https://github.com/MikeMcl/bignumber.js - * Copyright (c) 2019 Michael Mclaughlin - * MIT Licensed. - * - * BigNumber.prototype methods | BigNumber methods - * | - * absoluteValue abs | clone - * comparedTo | config set - * decimalPlaces dp | DECIMAL_PLACES - * dividedBy div | ROUNDING_MODE - * dividedToIntegerBy idiv | EXPONENTIAL_AT - * exponentiatedBy pow | RANGE - * integerValue | CRYPTO - * isEqualTo eq | MODULO_MODE - * isFinite | POW_PRECISION - * isGreaterThan gt | FORMAT - * isGreaterThanOrEqualTo gte | ALPHABET - * isInteger | isBigNumber - * isLessThan lt | maximum max - * isLessThanOrEqualTo lte | minimum min - * isNaN | random - * isNegative | sum - * isPositive | - * isZero | - * minus | - * modulo mod | - * multipliedBy times | - * negated | - * plus | - * precision sd | - * shiftedBy | - * squareRoot sqrt | - * toExponential | - * toFixed | - * toFormat | - * toFraction | - * toJSON | - * toNumber | - * toPrecision | - * toString | - * valueOf | - * - */ - - - var BigNumber, - isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, - mathceil = Math.ceil, - mathfloor = Math.floor, - - bignumberError = '[BigNumber Error] ', - tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', - - BASE = 1e14, - LOG_BASE = 14, - MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 - // MAX_INT32 = 0x7fffffff, // 2^31 - 1 - POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], - SQRT_BASE = 1e7, - - // EDITABLE - // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and - // the arguments to toExponential, toFixed, toFormat, and toPrecision. - MAX = 1E9; // 0 to MAX_INT32 - - - /* - * Create and return a BigNumber constructor. - */ - function clone(configObject) { - var div, convertBase, parseNumeric, - P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, - ONE = new BigNumber(1), - - - //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- - - - // The default values below must be integers within the inclusive ranges stated. - // The values can also be changed at run-time using BigNumber.set. - - // The maximum number of decimal places for operations involving division. - DECIMAL_PLACES = 20, // 0 to MAX - - // The rounding mode used when rounding to the above decimal places, and when using - // toExponential, toFixed, toFormat and toPrecision, and round (default value). - // UP 0 Away from zero. - // DOWN 1 Towards zero. - // CEIL 2 Towards +Infinity. - // FLOOR 3 Towards -Infinity. - // HALF_UP 4 Towards nearest neighbour. If equidistant, up. - // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. - // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. - // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. - // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. 
- ROUNDING_MODE = 4, // 0 to 8 - - // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] - - // The exponent value at and beneath which toString returns exponential notation. - // Number type: -7 - TO_EXP_NEG = -7, // 0 to -MAX - - // The exponent value at and above which toString returns exponential notation. - // Number type: 21 - TO_EXP_POS = 21, // 0 to MAX - - // RANGE : [MIN_EXP, MAX_EXP] - - // The minimum exponent value, beneath which underflow to zero occurs. - // Number type: -324 (5e-324) - MIN_EXP = -1e7, // -1 to -MAX - - // The maximum exponent value, above which overflow to Infinity occurs. - // Number type: 308 (1.7976931348623157e+308) - // For MAX_EXP > 1e7, e.g. new BigNumber('1e100000000').plus(1) may be slow. - MAX_EXP = 1e7, // 1 to MAX - - // Whether to use cryptographically-secure random number generation, if available. - CRYPTO = false, // true or false - - // The modulo mode used when calculating the modulus: a mod n. - // The quotient (q = a / n) is calculated according to the corresponding rounding mode. - // The remainder (r) is calculated as: r = a - n * q. - // - // UP 0 The remainder is positive if the dividend is negative, else is negative. - // DOWN 1 The remainder has the same sign as the dividend. - // This modulo mode is commonly known as 'truncated division' and is - // equivalent to (a % n) in JavaScript. - // FLOOR 3 The remainder has the same sign as the divisor (Python %). - // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. - // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). - // The remainder is always positive. - // - // The truncated division, floored division, Euclidian division and IEEE 754 remainder - // modes are commonly used for the modulus operation. - // Although the other rounding modes can also be used, they may not give useful results. - MODULO_MODE = 1, // 0 to 9 - - // The maximum number of significant digits of the result of the exponentiatedBy operation. - // If POW_PRECISION is 0, there will be unlimited significant digits. - POW_PRECISION = 0, // 0 to MAX - - // The format specification used by the BigNumber.prototype.toFormat method. - FORMAT = { - prefix: '', - groupSize: 3, - secondaryGroupSize: 0, - groupSeparator: ',', - decimalSeparator: '.', - fractionGroupSize: 0, - fractionGroupSeparator: '\xA0', // non-breaking space - suffix: '' - }, - - // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', - // '-', '.', whitespace, or repeated character. - // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' - ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz'; - - - //------------------------------------------------------------------------------------------ - - - // CONSTRUCTOR - - - /* - * The BigNumber constructor and exported function. - * Create and return a new instance of a BigNumber object. - * - * v {number|string|BigNumber} A numeric value. - * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. - */ - function BigNumber(v, b) { - var alphabet, c, caseChanged, e, i, isNum, len, str, - x = this; - - // Enable constructor call without `new`. 
- if (!(x instanceof BigNumber)) return new BigNumber(v, b); - - if (b == null) { - - if (v && v._isBigNumber === true) { - x.s = v.s; - - if (!v.c || v.e > MAX_EXP) { - x.c = x.e = null; - } else if (v.e < MIN_EXP) { - x.c = [x.e = 0]; - } else { - x.e = v.e; - x.c = v.c.slice(); - } - - return; - } - - if ((isNum = typeof v == 'number') && v * 0 == 0) { - - // Use `1 / n` to handle minus zero also. - x.s = 1 / v < 0 ? (v = -v, -1) : 1; - - // Fast path for integers, where n < 2147483648 (2**31). - if (v === ~~v) { - for (e = 0, i = v; i >= 10; i /= 10, e++); - - if (e > MAX_EXP) { - x.c = x.e = null; - } else { - x.e = e; - x.c = [v]; - } - - return; - } - - str = String(v); - } else { - - if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); - - x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; - } - - // Decimal point? - if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); - - // Exponential form? - if ((i = str.search(/e/i)) > 0) { - - // Determine exponent. - if (e < 0) e = i; - e += +str.slice(i + 1); - str = str.substring(0, i); - } else if (e < 0) { - - // Integer. - e = str.length; - } - - } else { - - // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' - intCheck(b, 2, ALPHABET.length, 'Base'); - - // Allow exponential notation to be used with base 10 argument, while - // also rounding to DECIMAL_PLACES as with other bases. - if (b == 10) { - x = new BigNumber(v); - return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); - } - - str = String(v); - - if (isNum = typeof v == 'number') { - - // Avoid potential interpretation of Infinity and NaN as base 44+ values. - if (v * 0 != 0) return parseNumeric(x, str, isNum, b); - - x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; - - // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' - if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { - throw Error - (tooManyDigits + v); - } - } else { - x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; - } - - alphabet = ALPHABET.slice(0, b); - e = i = 0; - - // Check that str is a valid base b number. - // Don't use RegExp, so alphabet can contain special characters. - for (len = str.length; i < len; i++) { - if (alphabet.indexOf(c = str.charAt(i)) < 0) { - if (c == '.') { - - // If '.' is not the first character and it has not be found before. - if (i > e) { - e = len; - continue; - } - } else if (!caseChanged) { - - // Allow e.g. hexadecimal 'FF' as well as 'ff'. - if (str == str.toUpperCase() && (str = str.toLowerCase()) || - str == str.toLowerCase() && (str = str.toUpperCase())) { - caseChanged = true; - i = -1; - e = 0; - continue; - } - } - - return parseNumeric(x, String(v), isNum, b); - } - } - - // Prevent later check for length on converted number. - isNum = false; - str = convertBase(str, b, 10, x.s); - - // Decimal point? - if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); - else e = str.length; - } - - // Determine leading zeros. - for (i = 0; str.charCodeAt(i) === 48; i++); - - // Determine trailing zeros. - for (len = str.length; str.charCodeAt(--len) === 48;); - - if (str = str.slice(i, ++len)) { - len -= i; - - // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' - if (isNum && BigNumber.DEBUG && - len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { - throw Error - (tooManyDigits + (x.s * v)); - } - - // Overflow? - if ((e = e - i - 1) > MAX_EXP) { - - // Infinity. - x.c = x.e = null; - - // Underflow? 
- } else if (e < MIN_EXP) { - - // Zero. - x.c = [x.e = 0]; - } else { - x.e = e; - x.c = []; - - // Transform base - - // e is the base 10 exponent. - // i is where to slice str to get the first element of the coefficient array. - i = (e + 1) % LOG_BASE; - if (e < 0) i += LOG_BASE; // i < 1 - - if (i < len) { - if (i) x.c.push(+str.slice(0, i)); - - for (len -= LOG_BASE; i < len;) { - x.c.push(+str.slice(i, i += LOG_BASE)); - } - - i = LOG_BASE - (str = str.slice(i)).length; - } else { - i -= len; - } - - for (; i--; str += '0'); - x.c.push(+str); - } - } else { - - // Zero. - x.c = [x.e = 0]; - } - } - - - // CONSTRUCTOR PROPERTIES - - - BigNumber.clone = clone; - - BigNumber.ROUND_UP = 0; - BigNumber.ROUND_DOWN = 1; - BigNumber.ROUND_CEIL = 2; - BigNumber.ROUND_FLOOR = 3; - BigNumber.ROUND_HALF_UP = 4; - BigNumber.ROUND_HALF_DOWN = 5; - BigNumber.ROUND_HALF_EVEN = 6; - BigNumber.ROUND_HALF_CEIL = 7; - BigNumber.ROUND_HALF_FLOOR = 8; - BigNumber.EUCLID = 9; - - - /* - * Configure infrequently-changing library-wide settings. - * - * Accept an object with the following optional properties (if the value of a property is - * a number, it must be an integer within the inclusive range stated): - * - * DECIMAL_PLACES {number} 0 to MAX - * ROUNDING_MODE {number} 0 to 8 - * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] - * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] - * CRYPTO {boolean} true or false - * MODULO_MODE {number} 0 to 9 - * POW_PRECISION {number} 0 to MAX - * ALPHABET {string} A string of two or more unique characters which does - * not contain '.'. - * FORMAT {object} An object with some of the following properties: - * prefix {string} - * groupSize {number} - * secondaryGroupSize {number} - * groupSeparator {string} - * decimalSeparator {string} - * fractionGroupSize {number} - * fractionGroupSeparator {string} - * suffix {string} - * - * (The values assigned to the above FORMAT object properties are not checked for validity.) - * - * E.g. - * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) - * - * Ignore properties/parameters set to null or undefined, except for ALPHABET. - * - * Return an object with the properties current values. - */ - BigNumber.config = BigNumber.set = function (obj) { - var p, v; - - if (obj != null) { - - if (typeof obj == 'object') { - - // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. - // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { - v = obj[p]; - intCheck(v, 0, MAX, p); - DECIMAL_PLACES = v; - } - - // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. - // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { - v = obj[p]; - intCheck(v, 0, 8, p); - ROUNDING_MODE = v; - } - - // EXPONENTIAL_AT {number|number[]} - // Integer, -MAX to MAX inclusive or - // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. - // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { - v = obj[p]; - if (v && v.pop) { - intCheck(v[0], -MAX, 0, p); - intCheck(v[1], 0, MAX, p); - TO_EXP_NEG = v[0]; - TO_EXP_POS = v[1]; - } else { - intCheck(v, -MAX, MAX, p); - TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? 
-v : v); - } - } - - // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or - // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. - // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' - if (obj.hasOwnProperty(p = 'RANGE')) { - v = obj[p]; - if (v && v.pop) { - intCheck(v[0], -MAX, -1, p); - intCheck(v[1], 1, MAX, p); - MIN_EXP = v[0]; - MAX_EXP = v[1]; - } else { - intCheck(v, -MAX, MAX, p); - if (v) { - MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); - } else { - throw Error - (bignumberError + p + ' cannot be zero: ' + v); - } - } - } - - // CRYPTO {boolean} true or false. - // '[BigNumber Error] CRYPTO not true or false: {v}' - // '[BigNumber Error] crypto unavailable' - if (obj.hasOwnProperty(p = 'CRYPTO')) { - v = obj[p]; - if (v === !!v) { - if (v) { - if (typeof crypto != 'undefined' && crypto && - (crypto.getRandomValues || crypto.randomBytes)) { - CRYPTO = v; - } else { - CRYPTO = !v; - throw Error - (bignumberError + 'crypto unavailable'); - } - } else { - CRYPTO = v; - } - } else { - throw Error - (bignumberError + p + ' not true or false: ' + v); - } - } - - // MODULO_MODE {number} Integer, 0 to 9 inclusive. - // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'MODULO_MODE')) { - v = obj[p]; - intCheck(v, 0, 9, p); - MODULO_MODE = v; - } - - // POW_PRECISION {number} Integer, 0 to MAX inclusive. - // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' - if (obj.hasOwnProperty(p = 'POW_PRECISION')) { - v = obj[p]; - intCheck(v, 0, MAX, p); - POW_PRECISION = v; - } - - // FORMAT {object} - // '[BigNumber Error] FORMAT not an object: {v}' - if (obj.hasOwnProperty(p = 'FORMAT')) { - v = obj[p]; - if (typeof v == 'object') FORMAT = v; - else throw Error - (bignumberError + p + ' not an object: ' + v); - } - - // ALPHABET {string} - // '[BigNumber Error] ALPHABET invalid: {v}' - if (obj.hasOwnProperty(p = 'ALPHABET')) { - v = obj[p]; - - // Disallow if only one character, - // or if it contains '+', '-', '.', whitespace, or a repeated character. - if (typeof v == 'string' && !/^.$|[+-.\s]|(.).*\1/.test(v)) { - ALPHABET = v; - } else { - throw Error - (bignumberError + p + ' invalid: ' + v); - } - } - - } else { - - // '[BigNumber Error] Object expected: {v}' - throw Error - (bignumberError + 'Object expected: ' + obj); - } - } - - return { - DECIMAL_PLACES: DECIMAL_PLACES, - ROUNDING_MODE: ROUNDING_MODE, - EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], - RANGE: [MIN_EXP, MAX_EXP], - CRYPTO: CRYPTO, - MODULO_MODE: MODULO_MODE, - POW_PRECISION: POW_PRECISION, - FORMAT: FORMAT, - ALPHABET: ALPHABET - }; - }; - - - /* - * Return true if v is a BigNumber instance, otherwise return false. - * - * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. - * - * v {any} - * - * '[BigNumber Error] Invalid BigNumber: {v}' - */ - BigNumber.isBigNumber = function (v) { - if (!v || v._isBigNumber !== true) return false; - if (!BigNumber.DEBUG) return true; - - var i, n, - c = v.c, - e = v.e, - s = v.s; - - out: if ({}.toString.call(c) == '[object Array]') { - - if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { - - // If the first element is zero, the BigNumber value must be zero. - if (c[0] === 0) { - if (e === 0 && c.length === 1) return true; - break out; - } - - // Calculate number of digits that c[0] should have, based on the exponent. 
- i = (e + 1) % LOG_BASE; - if (i < 1) i += LOG_BASE; - - // Calculate number of digits of c[0]. - //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { - if (String(c[0]).length == i) { - - for (i = 0; i < c.length; i++) { - n = c[i]; - if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; - } - - // Last element cannot be zero, unless it is the only element. - if (n !== 0) return true; - } - } - - // Infinity/NaN - } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { - return true; - } - - throw Error - (bignumberError + 'Invalid BigNumber: ' + v); - }; - - - /* - * Return a new BigNumber whose value is the maximum of the arguments. - * - * arguments {number|string|BigNumber} - */ - BigNumber.maximum = BigNumber.max = function () { - return maxOrMin(arguments, P.lt); - }; - - - /* - * Return a new BigNumber whose value is the minimum of the arguments. - * - * arguments {number|string|BigNumber} - */ - BigNumber.minimum = BigNumber.min = function () { - return maxOrMin(arguments, P.gt); - }; - - - /* - * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, - * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing - * zeros are produced). - * - * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' - * '[BigNumber Error] crypto unavailable' - */ - BigNumber.random = (function () { - var pow2_53 = 0x20000000000000; - - // Return a 53 bit integer n, where 0 <= n < 9007199254740992. - // Check if Math.random() produces more than 32 bits of randomness. - // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. - // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. - var random53bitInt = (Math.random() * pow2_53) & 0x1fffff - ? function () { return mathfloor(Math.random() * pow2_53); } - : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + - (Math.random() * 0x800000 | 0); }; - - return function (dp) { - var a, b, e, k, v, - i = 0, - c = [], - rand = new BigNumber(ONE); - - if (dp == null) dp = DECIMAL_PLACES; - else intCheck(dp, 0, MAX); - - k = mathceil(dp / LOG_BASE); - - if (CRYPTO) { - - // Browsers supporting crypto.getRandomValues. - if (crypto.getRandomValues) { - - a = crypto.getRandomValues(new Uint32Array(k *= 2)); - - for (; i < k;) { - - // 53 bits: - // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) - // 11111 11111111 11111111 11111111 11100000 00000000 00000000 - // ((Math.pow(2, 32) - 1) >>> 11).toString(2) - // 11111 11111111 11111111 - // 0x20000 is 2^21. - v = a[i] * 0x20000 + (a[i + 1] >>> 11); - - // Rejection sampling: - // 0 <= v < 9007199254740992 - // Probability that v >= 9e15, is - // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 - if (v >= 9e15) { - b = crypto.getRandomValues(new Uint32Array(2)); - a[i] = b[0]; - a[i + 1] = b[1]; - } else { - - // 0 <= v <= 8999999999999999 - // 0 <= (v % 1e14) <= 99999999999999 - c.push(v % 1e14); - i += 2; - } - } - i = k / 2; - - // Node.js supporting crypto.randomBytes. 
- } else if (crypto.randomBytes) { - - // buffer - a = crypto.randomBytes(k *= 7); - - for (; i < k;) { - - // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 - // 0x100000000 is 2^32, 0x1000000 is 2^24 - // 11111 11111111 11111111 11111111 11111111 11111111 11111111 - // 0 <= v < 9007199254740992 - v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + - (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + - (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; - - if (v >= 9e15) { - crypto.randomBytes(7).copy(a, i); - } else { - - // 0 <= (v % 1e14) <= 99999999999999 - c.push(v % 1e14); - i += 7; - } - } - i = k / 7; - } else { - CRYPTO = false; - throw Error - (bignumberError + 'crypto unavailable'); - } - } - - // Use Math.random. - if (!CRYPTO) { - - for (; i < k;) { - v = random53bitInt(); - if (v < 9e15) c[i++] = v % 1e14; - } - } - - k = c[--i]; - dp %= LOG_BASE; - - // Convert trailing digits to zeros according to dp. - if (k && dp) { - v = POWS_TEN[LOG_BASE - dp]; - c[i] = mathfloor(k / v) * v; - } - - // Remove trailing elements which are zero. - for (; c[i] === 0; c.pop(), i--); - - // Zero? - if (i < 0) { - c = [e = 0]; - } else { - - // Remove leading elements which are zero and adjust exponent accordingly. - for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); - - // Count the digits of the first element of c to determine leading zeros, and... - for (i = 1, v = c[0]; v >= 10; v /= 10, i++); - - // adjust the exponent accordingly. - if (i < LOG_BASE) e -= LOG_BASE - i; - } - - rand.e = e; - rand.c = c; - return rand; - }; - })(); - - - /* - * Return a BigNumber whose value is the sum of the arguments. - * - * arguments {number|string|BigNumber} - */ - BigNumber.sum = function () { - var i = 1, - args = arguments, - sum = new BigNumber(args[0]); - for (; i < args.length;) sum = sum.plus(args[i++]); - return sum; - }; - - - // PRIVATE FUNCTIONS - - - // Called by BigNumber and BigNumber.prototype.toString. - convertBase = (function () { - var decimal = '0123456789'; - - /* - * Convert string of baseIn to an array of numbers of baseOut. - * Eg. toBaseOut('255', 10, 16) returns [15, 15]. - * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. - */ - function toBaseOut(str, baseIn, baseOut, alphabet) { - var j, - arr = [0], - arrL, - i = 0, - len = str.length; - - for (; i < len;) { - for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); - - arr[0] += alphabet.indexOf(str.charAt(i++)); - - for (j = 0; j < arr.length; j++) { - - if (arr[j] > baseOut - 1) { - if (arr[j + 1] == null) arr[j + 1] = 0; - arr[j + 1] += arr[j] / baseOut | 0; - arr[j] %= baseOut; - } - } - } - - return arr.reverse(); - } - - // Convert a numeric string of baseIn to a numeric string of baseOut. - // If the caller is toString, we are converting from base 10 to baseOut. - // If the caller is BigNumber, we are converting from baseIn to base 10. - return function (str, baseIn, baseOut, sign, callerIsToString) { - var alphabet, d, e, k, r, x, xc, y, - i = str.indexOf('.'), - dp = DECIMAL_PLACES, - rm = ROUNDING_MODE; - - // Non-integer. - if (i >= 0) { - k = POW_PRECISION; - - // Unlimited precision. - POW_PRECISION = 0; - str = str.replace('.', ''); - y = new BigNumber(baseIn); - x = y.pow(str.length - i); - POW_PRECISION = k; - - // Convert str as if an integer, then restore the fraction part by dividing the - // result by its base raised to a power. - - y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), - 10, baseOut, decimal); - y.e = y.c.length; - } - - // Convert the number as integer. 
- - xc = toBaseOut(str, baseIn, baseOut, callerIsToString - ? (alphabet = ALPHABET, decimal) - : (alphabet = decimal, ALPHABET)); - - // xc now represents str as an integer and converted to baseOut. e is the exponent. - e = k = xc.length; - - // Remove trailing zeros. - for (; xc[--k] == 0; xc.pop()); - - // Zero? - if (!xc[0]) return alphabet.charAt(0); - - // Does str represent an integer? If so, no need for the division. - if (i < 0) { - --e; - } else { - x.c = xc; - x.e = e; - - // The sign is needed for correct rounding. - x.s = sign; - x = div(x, y, dp, rm, baseOut); - xc = x.c; - r = x.r; - e = x.e; - } - - // xc now represents str converted to baseOut. - - // THe index of the rounding digit. - d = e + dp + 1; - - // The rounding digit: the digit to the right of the digit that may be rounded up. - i = xc[d]; - - // Look at the rounding digits and mode to determine whether to round up. - - k = baseOut / 2; - r = r || d < 0 || xc[d + 1] != null; - - r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) - : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || - rm == (x.s < 0 ? 8 : 7)); - - // If the index of the rounding digit is not greater than zero, or xc represents - // zero, then the result of the base conversion is zero or, if rounding up, a value - // such as 0.00001. - if (d < 1 || !xc[0]) { - - // 1^-dp or 0 - str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); - } else { - - // Truncate xc to the required number of decimal places. - xc.length = d; - - // Round up? - if (r) { - - // Rounding up may mean the previous digit has to be rounded up and so on. - for (--baseOut; ++xc[--d] > baseOut;) { - xc[d] = 0; - - if (!d) { - ++e; - xc = [1].concat(xc); - } - } - } - - // Determine trailing zeros. - for (k = xc.length; !xc[--k];); - - // E.g. [4, 11, 15] becomes 4bf. - for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); - - // Add leading zeros, decimal point and trailing zeros as required. - str = toFixedPoint(str, e, alphabet.charAt(0)); - } - - // The caller will add the sign. - return str; - }; - })(); - - - // Perform division in the specified base. Called by div and convertBase. - div = (function () { - - // Assume non-zero x and k. - function multiply(x, k, base) { - var m, temp, xlo, xhi, - carry = 0, - i = x.length, - klo = k % SQRT_BASE, - khi = k / SQRT_BASE | 0; - - for (x = x.slice(); i--;) { - xlo = x[i] % SQRT_BASE; - xhi = x[i] / SQRT_BASE | 0; - m = khi * xlo + xhi * klo; - temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; - carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; - x[i] = temp % base; - } - - if (carry) x = [carry].concat(x); - - return x; - } - - function compare(a, b, aL, bL) { - var i, cmp; - - if (aL != bL) { - cmp = aL > bL ? 1 : -1; - } else { - - for (i = cmp = 0; i < aL; i++) { - - if (a[i] != b[i]) { - cmp = a[i] > b[i] ? 1 : -1; - break; - } - } - } - - return cmp; - } - - function subtract(a, b, aL, base) { - var i = 0; - - // Subtract b from a. - for (; aL--;) { - a[aL] -= i; - i = a[aL] < b[aL] ? 1 : 0; - a[aL] = i * base + a[aL] - b[aL]; - } - - // Remove leading zeros. - for (; !a[0] && a.length > 1; a.splice(0, 1)); - } - - // x: dividend, y: divisor. - return function (x, y, dp, rm, base) { - var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, - yL, yz, - s = x.s == y.s ? 1 : -1, - xc = x.c, - yc = y.c; - - // Either NaN, Infinity or 0? 
- if (!xc || !xc[0] || !yc || !yc[0]) { - - return new BigNumber( - - // Return NaN if either NaN, or both Infinity or 0. - !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : - - // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. - xc && xc[0] == 0 || !yc ? s * 0 : s / 0 - ); - } - - q = new BigNumber(s); - qc = q.c = []; - e = x.e - y.e; - s = dp + e + 1; - - if (!base) { - base = BASE; - e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); - s = s / LOG_BASE | 0; - } - - // Result exponent may be one less then the current value of e. - // The coefficients of the BigNumbers from convertBase may have trailing zeros. - for (i = 0; yc[i] == (xc[i] || 0); i++); - - if (yc[i] > (xc[i] || 0)) e--; - - if (s < 0) { - qc.push(1); - more = true; - } else { - xL = xc.length; - yL = yc.length; - i = 0; - s += 2; - - // Normalise xc and yc so highest order digit of yc is >= base / 2. - - n = mathfloor(base / (yc[0] + 1)); - - // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. - // if (n > 1 || n++ == 1 && yc[0] < base / 2) { - if (n > 1) { - yc = multiply(yc, n, base); - xc = multiply(xc, n, base); - yL = yc.length; - xL = xc.length; - } - - xi = yL; - rem = xc.slice(0, yL); - remL = rem.length; - - // Add zeros to make remainder as long as divisor. - for (; remL < yL; rem[remL++] = 0); - yz = yc.slice(); - yz = [0].concat(yz); - yc0 = yc[0]; - if (yc[1] >= base / 2) yc0++; - // Not necessary, but to prevent trial digit n > base, when using base 3. - // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; - - do { - n = 0; - - // Compare divisor and remainder. - cmp = compare(yc, rem, yL, remL); - - // If divisor < remainder. - if (cmp < 0) { - - // Calculate trial digit, n. - - rem0 = rem[0]; - if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); - - // n is how many times the divisor goes into the current remainder. - n = mathfloor(rem0 / yc0); - - // Algorithm: - // product = divisor multiplied by trial digit (n). - // Compare product and remainder. - // If product is greater than remainder: - // Subtract divisor from product, decrement trial digit. - // Subtract product from remainder. - // If product was less than remainder at the last compare: - // Compare new remainder and divisor. - // If remainder is greater than divisor: - // Subtract divisor from remainder, increment trial digit. - - if (n > 1) { - - // n may be > base only when base is 3. - if (n >= base) n = base - 1; - - // product = divisor * trial digit. - prod = multiply(yc, n, base); - prodL = prod.length; - remL = rem.length; - - // Compare product and remainder. - // If product > remainder then trial digit n too high. - // n is 1 too high about 5% of the time, and is not known to have - // ever been more than 1 too high. - while (compare(prod, rem, prodL, remL) == 1) { - n--; - - // Subtract divisor from product. - subtract(prod, yL < prodL ? yz : yc, prodL, base); - prodL = prod.length; - cmp = 1; - } - } else { - - // n is 0 or 1, cmp is -1. - // If n is 0, there is no need to compare yc and rem again below, - // so change cmp to 1 to avoid it. - // If n is 1, leave cmp as -1, so yc and rem are compared again. - if (n == 0) { - - // divisor < remainder, so n must be at least 1. - cmp = n = 1; - } - - // product = divisor - prod = yc.slice(); - prodL = prod.length; - } - - if (prodL < remL) prod = [0].concat(prod); - - // Subtract product from remainder. - subtract(rem, prod, remL, base); - remL = rem.length; - - // If product was < remainder. 
- if (cmp == -1) { - - // Compare divisor and new remainder. - // If divisor < new remainder, subtract divisor from remainder. - // Trial digit n too low. - // n is 1 too low about 5% of the time, and very rarely 2 too low. - while (compare(yc, rem, yL, remL) < 1) { - n++; - - // Subtract divisor from remainder. - subtract(rem, yL < remL ? yz : yc, remL, base); - remL = rem.length; - } - } - } else if (cmp === 0) { - n++; - rem = [0]; - } // else cmp === 1 and n will be 0 - - // Add the next digit, n, to the result array. - qc[i++] = n; - - // Update the remainder. - if (rem[0]) { - rem[remL++] = xc[xi] || 0; - } else { - rem = [xc[xi]]; - remL = 1; - } - } while ((xi++ < xL || rem[0] != null) && s--); - - more = rem[0] != null; - - // Leading zero? - if (!qc[0]) qc.splice(0, 1); - } - - if (base == BASE) { - - // To calculate q.e, first get the number of digits of qc[0]. - for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); - - round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); - - // Caller is convertBase. - } else { - q.e = e; - q.r = +more; - } - - return q; - }; - })(); - - - /* - * Return a string representing the value of BigNumber n in fixed-point or exponential - * notation rounded to the specified decimal places or significant digits. - * - * n: a BigNumber. - * i: the index of the last digit required (i.e. the digit that may be rounded up). - * rm: the rounding mode. - * id: 1 (toExponential) or 2 (toPrecision). - */ - function format(n, i, rm, id) { - var c0, e, ne, len, str; - - if (rm == null) rm = ROUNDING_MODE; - else intCheck(rm, 0, 8); - - if (!n.c) return n.toString(); - - c0 = n.c[0]; - ne = n.e; - - if (i == null) { - str = coeffToString(n.c); - str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) - ? toExponential(str, ne) - : toFixedPoint(str, ne, '0'); - } else { - n = round(new BigNumber(n), i, rm); - - // n.e may have changed if the value was rounded up. - e = n.e; - - str = coeffToString(n.c); - len = str.length; - - // toPrecision returns exponential notation if the number of significant digits - // specified is less than the number of digits necessary to represent the integer - // part of the value in fixed-point notation. - - // Exponential notation. - if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { - - // Append zeros? - for (; len < i; str += '0', len++); - str = toExponential(str, e); - - // Fixed-point notation. - } else { - i -= ne; - str = toFixedPoint(str, e, '0'); - - // Append zeros? - if (e + 1 > len) { - if (--i > 0) for (str += '.'; i--; str += '0'); - } else { - i += e - len; - if (i > 0) { - if (e + 1 == len) str += '.'; - for (; i--; str += '0'); - } - } - } - } - - return n.s < 0 && c0 ? '-' + str : str; - } - - - // Handle BigNumber.max and BigNumber.min. - function maxOrMin(args, method) { - var n, - i = 1, - m = new BigNumber(args[0]); - - for (; i < args.length; i++) { - n = new BigNumber(args[i]); - - // If any number is NaN, return NaN. - if (!n.s) { - m = n; - break; - } else if (method.call(m, n)) { - m = n; - } - } - - return m; - } - - - /* - * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. - * Called by minus, plus and times. - */ - function normalise(n, c, e) { - var i = 1, - j = c.length; - - // Remove trailing zeros. - for (; !c[--j]; c.pop()); - - // Calculate the base 10 exponent. First get the number of digits of c[0]. - for (j = c[0]; j >= 10; j /= 10, i++); - - // Overflow? - if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { - - // Infinity. 
- n.c = n.e = null; - - // Underflow? - } else if (e < MIN_EXP) { - - // Zero. - n.c = [n.e = 0]; - } else { - n.e = e; - n.c = c; - } - - return n; - } - - - // Handle values that fail the validity test in BigNumber. - parseNumeric = (function () { - var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, - dotAfter = /^([^.]+)\.$/, - dotBefore = /^\.([^.]+)$/, - isInfinityOrNaN = /^-?(Infinity|NaN)$/, - whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; - - return function (x, str, isNum, b) { - var base, - s = isNum ? str : str.replace(whitespaceOrPlus, ''); - - // No exception on ±Infinity or NaN. - if (isInfinityOrNaN.test(s)) { - x.s = isNaN(s) ? null : s < 0 ? -1 : 1; - } else { - if (!isNum) { - - // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i - s = s.replace(basePrefix, function (m, p1, p2) { - base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; - return !b || b == base ? p1 : m; - }); - - if (b) { - base = b; - - // E.g. '1.' to '1', '.1' to '0.1' - s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); - } - - if (str != s) return new BigNumber(s, base); - } - - // '[BigNumber Error] Not a number: {n}' - // '[BigNumber Error] Not a base {b} number: {n}' - if (BigNumber.DEBUG) { - throw Error - (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); - } - - // NaN - x.s = null; - } - - x.c = x.e = null; - } - })(); - - - /* - * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. - * If r is truthy, it is known that there are more digits after the rounding digit. - */ - function round(x, sd, rm, r) { - var d, i, j, k, n, ni, rd, - xc = x.c, - pows10 = POWS_TEN; - - // if x is not Infinity or NaN... - if (xc) { - - // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. - // n is a base 1e14 number, the value of the element of array x.c containing rd. - // ni is the index of n within x.c. - // d is the number of digits of n. - // i is the index of rd within n including leading zeros. - // j is the actual index of rd within n (if < 0, rd is a leading zero). - out: { - - // Get the number of digits of the first element of xc. - for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); - i = sd - d; - - // If the rounding digit is in the first element of xc... - if (i < 0) { - i += LOG_BASE; - j = sd; - n = xc[ni = 0]; - - // Get the rounding digit at index j of n. - rd = n / pows10[d - j - 1] % 10 | 0; - } else { - ni = mathceil((i + 1) / LOG_BASE); - - if (ni >= xc.length) { - - if (r) { - - // Needed by sqrt. - for (; xc.length <= ni; xc.push(0)); - n = rd = 0; - d = 1; - i %= LOG_BASE; - j = i - LOG_BASE + 1; - } else { - break out; - } - } else { - n = k = xc[ni]; - - // Get the number of digits of n. - for (d = 1; k >= 10; k /= 10, d++); - - // Get the index of rd within n. - i %= LOG_BASE; - - // Get the index of rd within n, adjusted for leading zeros. - // The number of leading zeros of n is given by LOG_BASE - d. - j = i - LOG_BASE + d; - - // Get the rounding digit at index j of n. - rd = j < 0 ? 0 : n / pows10[d - j - 1] % 10 | 0; - } - } - - r = r || sd < 0 || - - // Are there any non-zero digits after the rounding digit? - // The expression n % pows10[d - j - 1] returns all digits of n to the right - // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. - xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); - - r = rm < 4 - ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 
3 : 2)) - : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && - - // Check whether the digit to the left of the rounding digit is odd. - ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || - rm == (x.s < 0 ? 8 : 7)); - - if (sd < 1 || !xc[0]) { - xc.length = 0; - - if (r) { - - // Convert sd to decimal places. - sd -= x.e + 1; - - // 1, 0.1, 0.01, 0.001, 0.0001 etc. - xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; - x.e = -sd || 0; - } else { - - // Zero. - xc[0] = x.e = 0; - } - - return x; - } - - // Remove excess digits. - if (i == 0) { - xc.length = ni; - k = 1; - ni--; - } else { - xc.length = ni + 1; - k = pows10[LOG_BASE - i]; - - // E.g. 56700 becomes 56000 if 7 is the rounding digit. - // j > 0 means i > number of leading zeros of n. - xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; - } - - // Round up? - if (r) { - - for (; ;) { - - // If the digit to be rounded up is in the first element of xc... - if (ni == 0) { - - // i will be the length of xc[0] before k is added. - for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); - j = xc[0] += k; - for (k = 1; j >= 10; j /= 10, k++); - - // if i != k the length has increased. - if (i != k) { - x.e++; - if (xc[0] == BASE) xc[0] = 1; - } - - break; - } else { - xc[ni] += k; - if (xc[ni] != BASE) break; - xc[ni--] = 0; - k = 1; - } - } - } - - // Remove trailing zeros. - for (i = xc.length; xc[--i] === 0; xc.pop()); - } - - // Overflow? Infinity. - if (x.e > MAX_EXP) { - x.c = x.e = null; - - // Underflow? Zero. - } else if (x.e < MIN_EXP) { - x.c = [x.e = 0]; - } - } - - return x; - } - - - function valueOf(n) { - var str, - e = n.e; - - if (e === null) return n.toString(); - - str = coeffToString(n.c); - - str = e <= TO_EXP_NEG || e >= TO_EXP_POS - ? toExponential(str, e) - : toFixedPoint(str, e, '0'); - - return n.s < 0 ? '-' + str : str; - } - - - // PROTOTYPE/INSTANCE METHODS - - - /* - * Return a new BigNumber whose value is the absolute value of this BigNumber. - */ - P.absoluteValue = P.abs = function () { - var x = new BigNumber(this); - if (x.s < 0) x.s = 1; - return x; - }; - - - /* - * Return - * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), - * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), - * 0 if they have the same value, - * or null if the value of either is NaN. - */ - P.comparedTo = function (y, b) { - return compare(this, new BigNumber(y, b)); - }; - - - /* - * If dp is undefined or null or true or false, return the number of decimal places of the - * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. - * - * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this - * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or - * ROUNDING_MODE if rm is omitted. - * - * [dp] {number} Decimal places: integer, 0 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' - */ - P.decimalPlaces = P.dp = function (dp, rm) { - var c, n, v, - x = this; - - if (dp != null) { - intCheck(dp, 0, MAX); - if (rm == null) rm = ROUNDING_MODE; - else intCheck(rm, 0, 8); - - return round(new BigNumber(x), dp + x.e + 1, rm); - } - - if (!(c = x.c)) return null; - n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; - - // Subtract the number of trailing zeros of the last number. 
- if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); - if (n < 0) n = 0; - - return n; - }; - - - /* - * n / 0 = I - * n / N = N - * n / I = 0 - * 0 / n = 0 - * 0 / 0 = N - * 0 / N = N - * 0 / I = 0 - * N / n = N - * N / 0 = N - * N / N = N - * N / I = N - * I / n = I - * I / 0 = I - * I / N = N - * I / I = N - * - * Return a new BigNumber whose value is the value of this BigNumber divided by the value of - * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. - */ - P.dividedBy = P.div = function (y, b) { - return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); - }; - - - /* - * Return a new BigNumber whose value is the integer part of dividing the value of this - * BigNumber by the value of BigNumber(y, b). - */ - P.dividedToIntegerBy = P.idiv = function (y, b) { - return div(this, new BigNumber(y, b), 0, 1); - }; - - - /* - * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. - * - * If m is present, return the result modulo m. - * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. - * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. - * - * The modular power operation works efficiently when x, n, and m are integers, otherwise it - * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. - * - * n {number|string|BigNumber} The exponent. An integer. - * [m] {number|string|BigNumber} The modulus. - * - * '[BigNumber Error] Exponent not an integer: {n}' - */ - P.exponentiatedBy = P.pow = function (n, m) { - var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, - x = this; - - n = new BigNumber(n); - - // Allow NaN and ±Infinity, but not other non-integers. - if (n.c && !n.isInteger()) { - throw Error - (bignumberError + 'Exponent not an integer: ' + valueOf(n)); - } - - if (m != null) m = new BigNumber(m); - - // Exponent of MAX_SAFE_INTEGER is 15. - nIsBig = n.e > 14; - - // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. - if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { - - // The sign of the result of pow when x is negative depends on the evenness of n. - // If +n overflows to ±Infinity, the evenness of n would be not be known. - y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? 2 - isOdd(n) : +valueOf(n))); - return m ? y.mod(m) : y; - } - - nIsNeg = n.s < 0; - - if (m) { - - // x % m returns NaN if abs(m) is zero, or m is NaN. - if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); - - isModExp = !nIsNeg && x.isInteger() && m.isInteger(); - - if (isModExp) x = x.mod(m); - - // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. - // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. - } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 - // [1, 240000000] - ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 - // [80000000000000] [99999750000000] - : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { - - // If x is negative and n is odd, k = -0, else k = 0. - k = x.s < 0 && isOdd(n) ? -0 : 0; - - // If x >= 1, k = ±Infinity. - if (x.e > -1) k = 1 / k; - - // If n is negative return ±0, else return ±Infinity. - return new BigNumber(nIsNeg ? 1 / k : k); - - } else if (POW_PRECISION) { - - // Truncating each coefficient array to a length of k after each multiplication - // equates to truncating significant digits to POW_PRECISION + [28, 41], - // i.e. there will be a minimum of 28 guard digits retained. 
- k = mathceil(POW_PRECISION / LOG_BASE + 2); - } - - if (nIsBig) { - half = new BigNumber(0.5); - if (nIsNeg) n.s = 1; - nIsOdd = isOdd(n); - } else { - i = Math.abs(+valueOf(n)); - nIsOdd = i % 2; - } - - y = new BigNumber(ONE); - - // Performs 54 loop iterations for n of 9007199254740991. - for (; ;) { - - if (nIsOdd) { - y = y.times(x); - if (!y.c) break; - - if (k) { - if (y.c.length > k) y.c.length = k; - } else if (isModExp) { - y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); - } - } - - if (i) { - i = mathfloor(i / 2); - if (i === 0) break; - nIsOdd = i % 2; - } else { - n = n.times(half); - round(n, n.e + 1, 1); - - if (n.e > 14) { - nIsOdd = isOdd(n); - } else { - i = +valueOf(n); - if (i === 0) break; - nIsOdd = i % 2; - } - } - - x = x.times(x); - - if (k) { - if (x.c && x.c.length > k) x.c.length = k; - } else if (isModExp) { - x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); - } - } - - if (isModExp) return y; - if (nIsNeg) y = ONE.div(y); - - return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; - }; - - - /* - * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer - * using rounding mode rm, or ROUNDING_MODE if rm is omitted. - * - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' - */ - P.integerValue = function (rm) { - var n = new BigNumber(this); - if (rm == null) rm = ROUNDING_MODE; - else intCheck(rm, 0, 8); - return round(n, n.e + 1, rm); - }; - - - /* - * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), - * otherwise return false. - */ - P.isEqualTo = P.eq = function (y, b) { - return compare(this, new BigNumber(y, b)) === 0; - }; - - - /* - * Return true if the value of this BigNumber is a finite number, otherwise return false. - */ - P.isFinite = function () { - return !!this.c; - }; - - - /* - * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), - * otherwise return false. - */ - P.isGreaterThan = P.gt = function (y, b) { - return compare(this, new BigNumber(y, b)) > 0; - }; - - - /* - * Return true if the value of this BigNumber is greater than or equal to the value of - * BigNumber(y, b), otherwise return false. - */ - P.isGreaterThanOrEqualTo = P.gte = function (y, b) { - return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; - - }; - - - /* - * Return true if the value of this BigNumber is an integer, otherwise return false. - */ - P.isInteger = function () { - return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; - }; - - - /* - * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), - * otherwise return false. - */ - P.isLessThan = P.lt = function (y, b) { - return compare(this, new BigNumber(y, b)) < 0; - }; - - - /* - * Return true if the value of this BigNumber is less than or equal to the value of - * BigNumber(y, b), otherwise return false. - */ - P.isLessThanOrEqualTo = P.lte = function (y, b) { - return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; - }; - - - /* - * Return true if the value of this BigNumber is NaN, otherwise return false. - */ - P.isNaN = function () { - return !this.s; - }; - - - /* - * Return true if the value of this BigNumber is negative, otherwise return false. 
- */ - P.isNegative = function () { - return this.s < 0; - }; - - - /* - * Return true if the value of this BigNumber is positive, otherwise return false. - */ - P.isPositive = function () { - return this.s > 0; - }; - - - /* - * Return true if the value of this BigNumber is 0 or -0, otherwise return false. - */ - P.isZero = function () { - return !!this.c && this.c[0] == 0; - }; - - - /* - * n - 0 = n - * n - N = N - * n - I = -I - * 0 - n = -n - * 0 - 0 = 0 - * 0 - N = N - * 0 - I = -I - * N - n = N - * N - 0 = N - * N - N = N - * N - I = N - * I - n = I - * I - 0 = I - * I - N = N - * I - I = N - * - * Return a new BigNumber whose value is the value of this BigNumber minus the value of - * BigNumber(y, b). - */ - P.minus = function (y, b) { - var i, j, t, xLTy, - x = this, - a = x.s; - - y = new BigNumber(y, b); - b = y.s; - - // Either NaN? - if (!a || !b) return new BigNumber(NaN); - - // Signs differ? - if (a != b) { - y.s = -b; - return x.plus(y); - } - - var xe = x.e / LOG_BASE, - ye = y.e / LOG_BASE, - xc = x.c, - yc = y.c; - - if (!xe || !ye) { - - // Either Infinity? - if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); - - // Either zero? - if (!xc[0] || !yc[0]) { - - // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. - return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : - - // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity - ROUNDING_MODE == 3 ? -0 : 0); - } - } - - xe = bitFloor(xe); - ye = bitFloor(ye); - xc = xc.slice(); - - // Determine which is the bigger number. - if (a = xe - ye) { - - if (xLTy = a < 0) { - a = -a; - t = xc; - } else { - ye = xe; - t = yc; - } - - t.reverse(); - - // Prepend zeros to equalise exponents. - for (b = a; b--; t.push(0)); - t.reverse(); - } else { - - // Exponents equal. Check digit by digit. - j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; - - for (a = b = 0; b < j; b++) { - - if (xc[b] != yc[b]) { - xLTy = xc[b] < yc[b]; - break; - } - } - } - - // x < y? Point xc to the array of the bigger number. - if (xLTy) t = xc, xc = yc, yc = t, y.s = -y.s; - - b = (j = yc.length) - (i = xc.length); - - // Append zeros to xc if shorter. - // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. - if (b > 0) for (; b--; xc[i++] = 0); - b = BASE - 1; - - // Subtract yc from xc. - for (; j > a;) { - - if (xc[--j] < yc[j]) { - for (i = j; i && !xc[--i]; xc[i] = b); - --xc[i]; - xc[j] += BASE; - } - - xc[j] -= yc[j]; - } - - // Remove leading zeros and adjust exponent accordingly. - for (; xc[0] == 0; xc.splice(0, 1), --ye); - - // Zero? - if (!xc[0]) { - - // Following IEEE 754 (2008) 6.3, - // n - n = +0 but n - n = -0 when rounding towards -Infinity. - y.s = ROUNDING_MODE == 3 ? -1 : 1; - y.c = [y.e = 0]; - return y; - } - - // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity - // for finite x and y. - return normalise(y, xc, ye); - }; - - - /* - * n % 0 = N - * n % N = N - * n % I = n - * 0 % n = 0 - * -0 % n = -0 - * 0 % 0 = N - * 0 % N = N - * 0 % I = 0 - * N % n = N - * N % 0 = N - * N % N = N - * N % I = N - * I % n = N - * I % 0 = N - * I % N = N - * I % I = N - * - * Return a new BigNumber whose value is the value of this BigNumber modulo the value of - * BigNumber(y, b). The result depends on the value of MODULO_MODE. - */ - P.modulo = P.mod = function (y, b) { - var q, s, - x = this; - - y = new BigNumber(y, b); - - // Return NaN if x is Infinity or NaN, or y is NaN or zero. 
- if (!x.c || !y.s || y.c && !y.c[0]) { - return new BigNumber(NaN); - - // Return x if y is Infinity or x is zero. - } else if (!y.c || x.c && !x.c[0]) { - return new BigNumber(x); - } - - if (MODULO_MODE == 9) { - - // Euclidian division: q = sign(y) * floor(x / abs(y)) - // r = x - qy where 0 <= r < abs(y) - s = y.s; - y.s = 1; - q = div(x, y, 0, 3); - y.s = s; - q.s *= s; - } else { - q = div(x, y, 0, MODULO_MODE); - } - - y = x.minus(q.times(y)); - - // To match JavaScript %, ensure sign of zero is sign of dividend. - if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; - - return y; - }; - - - /* - * n * 0 = 0 - * n * N = N - * n * I = I - * 0 * n = 0 - * 0 * 0 = 0 - * 0 * N = N - * 0 * I = N - * N * n = N - * N * 0 = N - * N * N = N - * N * I = N - * I * n = I - * I * 0 = N - * I * N = N - * I * I = I - * - * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value - * of BigNumber(y, b). - */ - P.multipliedBy = P.times = function (y, b) { - var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, - base, sqrtBase, - x = this, - xc = x.c, - yc = (y = new BigNumber(y, b)).c; - - // Either NaN, ±Infinity or ±0? - if (!xc || !yc || !xc[0] || !yc[0]) { - - // Return NaN if either is NaN, or one is 0 and the other is Infinity. - if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { - y.c = y.e = y.s = null; - } else { - y.s *= x.s; - - // Return ±Infinity if either is ±Infinity. - if (!xc || !yc) { - y.c = y.e = null; - - // Return ±0 if either is ±0. - } else { - y.c = [0]; - y.e = 0; - } - } - - return y; - } - - e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); - y.s *= x.s; - xcL = xc.length; - ycL = yc.length; - - // Ensure xc points to longer array and xcL to its length. - if (xcL < ycL) zc = xc, xc = yc, yc = zc, i = xcL, xcL = ycL, ycL = i; - - // Initialise the result array with zeros. - for (i = xcL + ycL, zc = []; i--; zc.push(0)); - - base = BASE; - sqrtBase = SQRT_BASE; - - for (i = ycL; --i >= 0;) { - c = 0; - ylo = yc[i] % sqrtBase; - yhi = yc[i] / sqrtBase | 0; - - for (k = xcL, j = i + k; j > i;) { - xlo = xc[--k] % sqrtBase; - xhi = xc[k] / sqrtBase | 0; - m = yhi * xlo + xhi * ylo; - xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; - c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; - zc[j--] = xlo % base; - } - - zc[j] = c; - } - - if (c) { - ++e; - } else { - zc.splice(0, 1); - } - - return normalise(y, zc, e); - }; - - - /* - * Return a new BigNumber whose value is the value of this BigNumber negated, - * i.e. multiplied by -1. - */ - P.negated = function () { - var x = new BigNumber(this); - x.s = -x.s || null; - return x; - }; - - - /* - * n + 0 = n - * n + N = N - * n + I = I - * 0 + n = n - * 0 + 0 = 0 - * 0 + N = N - * 0 + I = I - * N + n = N - * N + 0 = N - * N + N = N - * N + I = N - * I + n = I - * I + 0 = I - * I + N = N - * I + I = I - * - * Return a new BigNumber whose value is the value of this BigNumber plus the value of - * BigNumber(y, b). - */ - P.plus = function (y, b) { - var t, - x = this, - a = x.s; - - y = new BigNumber(y, b); - b = y.s; - - // Either NaN? - if (!a || !b) return new BigNumber(NaN); - - // Signs differ? - if (a != b) { - y.s = -b; - return x.minus(y); - } - - var xe = x.e / LOG_BASE, - ye = y.e / LOG_BASE, - xc = x.c, - yc = y.c; - - if (!xe || !ye) { - - // Return ±Infinity if either ±Infinity. - if (!xc || !yc) return new BigNumber(a / 0); - - // Either zero? - // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. 
- if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? x : a * 0); - } - - xe = bitFloor(xe); - ye = bitFloor(ye); - xc = xc.slice(); - - // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. - if (a = xe - ye) { - if (a > 0) { - ye = xe; - t = yc; - } else { - a = -a; - t = xc; - } - - t.reverse(); - for (; a--; t.push(0)); - t.reverse(); - } - - a = xc.length; - b = yc.length; - - // Point xc to the longer array, and b to the shorter length. - if (a - b < 0) t = yc, yc = xc, xc = t, b = a; - - // Only start adding at yc.length - 1 as the further digits of xc can be ignored. - for (a = 0; b;) { - a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; - xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; - } - - if (a) { - xc = [a].concat(xc); - ++ye; - } - - // No need to check for zero, as +x + +y != 0 && -x + -y != 0 - // ye = MAX_EXP + 1 possible - return normalise(y, xc, ye); - }; - - - /* - * If sd is undefined or null or true or false, return the number of significant digits of - * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. - * If sd is true include integer-part trailing zeros in the count. - * - * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this - * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or - * ROUNDING_MODE if rm is omitted. - * - * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. - * boolean: whether to count integer-part trailing zeros: true or false. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' - */ - P.precision = P.sd = function (sd, rm) { - var c, n, v, - x = this; - - if (sd != null && sd !== !!sd) { - intCheck(sd, 1, MAX); - if (rm == null) rm = ROUNDING_MODE; - else intCheck(rm, 0, 8); - - return round(new BigNumber(x), sd, rm); - } - - if (!(c = x.c)) return null; - v = c.length - 1; - n = v * LOG_BASE + 1; - - if (v = c[v]) { - - // Subtract the number of trailing zeros of the last element. - for (; v % 10 == 0; v /= 10, n--); - - // Add the number of digits of the first element. - for (v = c[0]; v >= 10; v /= 10, n++); - } - - if (sd && x.e + 1 > n) n = x.e + 1; - - return n; - }; - - - /* - * Return a new BigNumber whose value is the value of this BigNumber shifted by k places - * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. - * - * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' - */ - P.shiftedBy = function (k) { - intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); - return this.times('1e' + k); - }; - - - /* - * sqrt(-n) = N - * sqrt(N) = N - * sqrt(-I) = N - * sqrt(I) = I - * sqrt(0) = 0 - * sqrt(-0) = -0 - * - * Return a new BigNumber whose value is the square root of the value of this BigNumber, - * rounded according to DECIMAL_PLACES and ROUNDING_MODE. - */ - P.squareRoot = P.sqrt = function () { - var m, n, r, rep, t, - x = this, - c = x.c, - s = x.s, - e = x.e, - dp = DECIMAL_PLACES + 4, - half = new BigNumber('0.5'); - - // Negative/NaN/Infinity/zero? - if (s !== 1 || !c || !c[0]) { - return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); - } - - // Initial estimate. - s = Math.sqrt(+valueOf(x)); - - // Math.sqrt underflow/overflow? - // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
- if (s == 0 || s == 1 / 0) { - n = coeffToString(c); - if ((n.length + e) % 2 == 0) n += '0'; - s = Math.sqrt(+n); - e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); - - if (s == 1 / 0) { - n = '1e' + e; - } else { - n = s.toExponential(); - n = n.slice(0, n.indexOf('e') + 1) + e; - } - - r = new BigNumber(n); - } else { - r = new BigNumber(s + ''); - } - - // Check for zero. - // r could be zero if MIN_EXP is changed after the this value was created. - // This would cause a division by zero (x/t) and hence Infinity below, which would cause - // coeffToString to throw. - if (r.c[0]) { - e = r.e; - s = e + dp; - if (s < 3) s = 0; - - // Newton-Raphson iteration. - for (; ;) { - t = r; - r = half.times(t.plus(div(x, t, dp, 1))); - - if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { - - // The exponent of r may here be one less than the final result exponent, - // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits - // are indexed correctly. - if (r.e < e) --s; - n = n.slice(s - 3, s + 1); - - // The 4th rounding digit may be in error by -1 so if the 4 rounding digits - // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the - // iteration. - if (n == '9999' || !rep && n == '4999') { - - // On the first iteration only, check to see if rounding up gives the - // exact result as the nines may infinitely repeat. - if (!rep) { - round(t, t.e + DECIMAL_PLACES + 2, 0); - - if (t.times(t).eq(x)) { - r = t; - break; - } - } - - dp += 4; - s += 4; - rep = 1; - } else { - - // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact - // result. If not, then there are further digits and m will be truthy. - if (!+n || !+n.slice(1) && n.charAt(0) == '5') { - - // Truncate to the first rounding digit. - round(r, r.e + DECIMAL_PLACES + 2, 1); - m = !r.times(r).eq(x); - } - - break; - } - } - } - } - - return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); - }; - - - /* - * Return a string representing the value of this BigNumber in exponential notation and - * rounded using ROUNDING_MODE to dp fixed decimal places. - * - * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' - */ - P.toExponential = function (dp, rm) { - if (dp != null) { - intCheck(dp, 0, MAX); - dp++; - } - return format(this, dp, rm, 1); - }; - - - /* - * Return a string representing the value of this BigNumber in fixed-point notation rounding - * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. - * - * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', - * but e.g. (-0.00001).toFixed(0) is '-0'. - * - * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' - */ - P.toFixed = function (dp, rm) { - if (dp != null) { - intCheck(dp, 0, MAX); - dp = dp + this.e + 1; - } - return format(this, dp, rm); - }; - - - /* - * Return a string representing the value of this BigNumber in fixed-point notation rounded - * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties - * of the format or FORMAT object (see BigNumber.set). - * - * The formatting object may contain some or all of the properties shown below. 
- * - * FORMAT = { - * prefix: '', - * groupSize: 3, - * secondaryGroupSize: 0, - * groupSeparator: ',', - * decimalSeparator: '.', - * fractionGroupSize: 0, - * fractionGroupSeparator: '\xA0', // non-breaking space - * suffix: '' - * }; - * - * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * [format] {object} Formatting options. See FORMAT pbject above. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' - * '[BigNumber Error] Argument not an object: {format}' - */ - P.toFormat = function (dp, rm, format) { - var str, - x = this; - - if (format == null) { - if (dp != null && rm && typeof rm == 'object') { - format = rm; - rm = null; - } else if (dp && typeof dp == 'object') { - format = dp; - dp = rm = null; - } else { - format = FORMAT; - } - } else if (typeof format != 'object') { - throw Error - (bignumberError + 'Argument not an object: ' + format); - } - - str = x.toFixed(dp, rm); - - if (x.c) { - var i, - arr = str.split('.'), - g1 = +format.groupSize, - g2 = +format.secondaryGroupSize, - groupSeparator = format.groupSeparator || '', - intPart = arr[0], - fractionPart = arr[1], - isNeg = x.s < 0, - intDigits = isNeg ? intPart.slice(1) : intPart, - len = intDigits.length; - - if (g2) i = g1, g1 = g2, g2 = i, len -= i; - - if (g1 > 0 && len > 0) { - i = len % g1 || g1; - intPart = intDigits.substr(0, i); - for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); - if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); - if (isNeg) intPart = '-' + intPart; - } - - str = fractionPart - ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) - ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), - '$&' + (format.fractionGroupSeparator || '')) - : fractionPart) - : intPart; - } - - return (format.prefix || '') + str + (format.suffix || ''); - }; - - - /* - * Return an array of two BigNumbers representing the value of this BigNumber as a simple - * fraction with an integer numerator and an integer denominator. - * The denominator will be a positive non-zero value less than or equal to the specified - * maximum denominator. If a maximum denominator is not specified, the denominator will be - * the lowest value necessary to represent the number exactly. - * - * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. - * - * '[BigNumber Error] Argument {not an integer|out of range} : {md}' - */ - P.toFraction = function (md) { - var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, - x = this, - xc = x.c; - - if (md != null) { - n = new BigNumber(md); - - // Throw if md is less than one or is not an integer, unless it is Infinity. - if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { - throw Error - (bignumberError + 'Argument ' + - (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); - } - } - - if (!xc) return new BigNumber(x); - - d = new BigNumber(ONE); - n1 = d0 = new BigNumber(ONE); - d1 = n0 = new BigNumber(ONE); - s = coeffToString(xc); - - // Determine initial denominator. - // d is a power of 10 and the minimum max denominator that specifies the value exactly. - e = d.e = s.length - x.e - 1; - d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; - md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; - - exp = MAX_EXP; - MAX_EXP = 1 / 0; - n = new BigNumber(s); - - // n0 = d1 = 0 - n0.c[0] = 0; - - for (; ;) { - q = div(n, d, 0, 1); - d2 = d0.plus(q.times(d1)); - if (d2.comparedTo(md) == 1) break; - d0 = d1; - d1 = d2; - n1 = n0.plus(q.times(d2 = n1)); - n0 = d2; - d = n.minus(q.times(d2 = d)); - n = d2; - } - - d2 = div(md.minus(d0), d1, 0, 1); - n0 = n0.plus(d2.times(n1)); - d0 = d0.plus(d2.times(d1)); - n0.s = n1.s = x.s; - e = e * 2; - - // Determine which fraction is closer to x, n0/d0 or n1/d1 - r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( - div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; - - MAX_EXP = exp; - - return r; - }; - - - /* - * Return the value of this BigNumber converted to a number primitive. - */ - P.toNumber = function () { - return +valueOf(this); - }; - - - /* - * Return a string representing the value of this BigNumber rounded to sd significant digits - * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits - * necessary to represent the integer part of the value in fixed-point notation, then use - * exponential notation. - * - * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. - * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. - * - * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' - */ - P.toPrecision = function (sd, rm) { - if (sd != null) intCheck(sd, 1, MAX); - return format(this, sd, rm, 2); - }; - - - /* - * Return a string representing the value of this BigNumber in base b, or base 10 if b is - * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and - * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent - * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than - * TO_EXP_NEG, return exponential notation. - * - * [b] {number} Integer, 2 to ALPHABET.length inclusive. - * - * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' - */ - P.toString = function (b) { - var str, - n = this, - s = n.s, - e = n.e; - - // Infinity or NaN? - if (e === null) { - if (s) { - str = 'Infinity'; - if (s < 0) str = '-' + str; - } else { - str = 'NaN'; - } - } else { - if (b == null) { - str = e <= TO_EXP_NEG || e >= TO_EXP_POS - ? toExponential(coeffToString(n.c), e) - : toFixedPoint(coeffToString(n.c), e, '0'); - } else if (b === 10) { - n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); - str = toFixedPoint(coeffToString(n.c), n.e, '0'); - } else { - intCheck(b, 2, ALPHABET.length, 'Base'); - str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); - } - - if (s < 0 && n.c[0]) str = '-' + str; - } - - return str; - }; - - - /* - * Return as toString, but do not accept a base argument, and include the minus sign for - * negative zero. - */ - P.valueOf = P.toJSON = function () { - return valueOf(this); - }; - - - P._isBigNumber = true; - - if (configObject != null) BigNumber.set(configObject); - - return BigNumber; - } - - - // PRIVATE HELPER FUNCTIONS - - // These functions don't need access to variables, - // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. - - - function bitFloor(n) { - var i = n | 0; - return n > 0 || n === i ? i : i - 1; - } - - - // Return a coefficient array as a string of base 10 digits. 
- function coeffToString(a) { - var s, z, - i = 1, - j = a.length, - r = a[0] + ''; - - for (; i < j;) { - s = a[i++] + ''; - z = LOG_BASE - s.length; - for (; z--; s = '0' + s); - r += s; - } - - // Determine trailing zeros. - for (j = r.length; r.charCodeAt(--j) === 48;); - - return r.slice(0, j + 1 || 1); - } - - - // Compare the value of BigNumbers x and y. - function compare(x, y) { - var a, b, - xc = x.c, - yc = y.c, - i = x.s, - j = y.s, - k = x.e, - l = y.e; - - // Either NaN? - if (!i || !j) return null; - - a = xc && !xc[0]; - b = yc && !yc[0]; - - // Either zero? - if (a || b) return a ? b ? 0 : -j : i; - - // Signs differ? - if (i != j) return i; - - a = i < 0; - b = k == l; - - // Either Infinity? - if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; - - // Compare exponents. - if (!b) return k > l ^ a ? 1 : -1; - - j = (k = xc.length) < (l = yc.length) ? k : l; - - // Compare digit by digit. - for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; - - // Compare lengths. - return k == l ? 0 : k > l ^ a ? 1 : -1; - } - - - /* - * Check that n is a primitive number, an integer, and in range, otherwise throw. - */ - function intCheck(n, min, max, name) { - if (n < min || n > max || n !== mathfloor(n)) { - throw Error - (bignumberError + (name || 'Argument') + (typeof n == 'number' - ? n < min || n > max ? ' out of range: ' : ' not an integer: ' - : ' not a primitive number: ') + String(n)); - } - } - - - // Assumes finite n. - function isOdd(n) { - var k = n.c.length - 1; - return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; - } - - - function toExponential(str, e) { - return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + - (e < 0 ? 'e' : 'e+') + e; - } - - - function toFixedPoint(str, e, z) { - var len, zs; - - // Negative exponent? - if (e < 0) { - - // Prepend zeros. - for (zs = z + '.'; ++e; zs += z); - str = zs + str; - - // Positive exponent - } else { - len = str.length; - - // Append zeros. - if (++e > len) { - for (zs = z, e -= len; --e; zs += z); - str += zs; - } else if (e < len) { - str = str.slice(0, e) + '.' + str.slice(e); - } - } - - return str; - } - - - // EXPORT - - - BigNumber = clone(); - BigNumber['default'] = BigNumber.BigNumber = BigNumber; - - // AMD. - if (typeof define == 'function' && define.amd) { - define(function () { return BigNumber; }); - - // Node.js and other environments that support module.exports. - } else if ( true && module.exports) { - module.exports = BigNumber; - - // Browser. - } else { - if (!globalObject) { - globalObject = typeof self != 'undefined' && self ? 
self : window; - } - - globalObject.BigNumber = BigNumber; - } -})(this); - - -/***/ }), - -/***/ 20336: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -var DuplexStream = __nccwpck_require__(85519).Duplex - , util = __nccwpck_require__(31669) - -function BufferList (callback) { - if (!(this instanceof BufferList)) - return new BufferList(callback) - - this._bufs = [] - this.length = 0 - - if (typeof callback == 'function') { - this._callback = callback - - var piper = function piper (err) { - if (this._callback) { - this._callback(err) - this._callback = null - } - }.bind(this) - - this.on('pipe', function onPipe (src) { - src.on('error', piper) - }) - this.on('unpipe', function onUnpipe (src) { - src.removeListener('error', piper) - }) - } else { - this.append(callback) - } - - DuplexStream.call(this) -} - - -util.inherits(BufferList, DuplexStream) - - -BufferList.prototype._offset = function _offset (offset) { - var tot = 0, i = 0, _t - if (offset === 0) return [ 0, 0 ] - for (; i < this._bufs.length; i++) { - _t = tot + this._bufs[i].length - if (offset < _t || i == this._bufs.length - 1) { - return [ i, offset - tot ] - } - tot = _t - } -} - -BufferList.prototype._reverseOffset = function (blOffset) { - var bufferId = blOffset[0] - var offset = blOffset[1] - for (var i = 0; i < bufferId; i++) { - offset += this._bufs[i].length - } - return offset -} - -BufferList.prototype.append = function append (buf) { - var i = 0 - - if (Buffer.isBuffer(buf)) { - this._appendBuffer(buf) - } else if (Array.isArray(buf)) { - for (; i < buf.length; i++) - this.append(buf[i]) - } else if (buf instanceof BufferList) { - // unwrap argument into individual BufferLists - for (; i < buf._bufs.length; i++) - this.append(buf._bufs[i]) - } else if (buf != null) { - // coerce number arguments to strings, since Buffer(number) does - // uninitialized memory allocation - if (typeof buf == 'number') - buf = buf.toString() - - this._appendBuffer(Buffer.from(buf)) - } - - return this -} - - -BufferList.prototype._appendBuffer = function appendBuffer (buf) { - this._bufs.push(buf) - this.length += buf.length -} - - -BufferList.prototype._write = function _write (buf, encoding, callback) { - this._appendBuffer(buf) - - if (typeof callback == 'function') - callback() -} - - -BufferList.prototype._read = function _read (size) { - if (!this.length) - return this.push(null) - - size = Math.min(size, this.length) - this.push(this.slice(0, size)) - this.consume(size) -} - - -BufferList.prototype.end = function end (chunk) { - DuplexStream.prototype.end.call(this, chunk) - - if (this._callback) { - this._callback(null, this.slice()) - this._callback = null - } -} - - -BufferList.prototype.get = function get (index) { - if (index > this.length || index < 0) { - return undefined - } - var offset = this._offset(index) - return this._bufs[offset[0]][offset[1]] -} - - -BufferList.prototype.slice = function slice (start, end) { - if (typeof start == 'number' && start < 0) - start += this.length - if (typeof end == 'number' && end < 0) - end += this.length - return this.copy(null, 0, start, end) -} - - -BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { - if (typeof srcStart != 'number' || srcStart < 0) - srcStart = 0 - if (typeof srcEnd != 'number' || srcEnd > this.length) - srcEnd = this.length - if (srcStart >= this.length) - return dst || Buffer.alloc(0) - if (srcEnd <= 0) - return dst || Buffer.alloc(0) - - var copy = !!dst - , off = this._offset(srcStart) - , 
len = srcEnd - srcStart - , bytes = len - , bufoff = (copy && dstStart) || 0 - , start = off[1] - , l - , i - - // copy/slice everything - if (srcStart === 0 && srcEnd == this.length) { - if (!copy) { // slice, but full concat if multiple buffers - return this._bufs.length === 1 - ? this._bufs[0] - : Buffer.concat(this._bufs, this.length) - } - - // copy, need to copy individual buffers - for (i = 0; i < this._bufs.length; i++) { - this._bufs[i].copy(dst, bufoff) - bufoff += this._bufs[i].length - } - - return dst - } - - // easy, cheap case where it's a subset of one of the buffers - if (bytes <= this._bufs[off[0]].length - start) { - return copy - ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) - : this._bufs[off[0]].slice(start, start + bytes) - } - - if (!copy) // a slice, we need something to copy in to - dst = Buffer.allocUnsafe(len) - - for (i = off[0]; i < this._bufs.length; i++) { - l = this._bufs[i].length - start - - if (bytes > l) { - this._bufs[i].copy(dst, bufoff, start) - bufoff += l - } else { - this._bufs[i].copy(dst, bufoff, start, start + bytes) - bufoff += l - break - } - - bytes -= l - - if (start) - start = 0 - } - - // safeguard so that we don't return uninitialized memory - if (dst.length > bufoff) return dst.slice(0, bufoff) - - return dst -} - -BufferList.prototype.shallowSlice = function shallowSlice (start, end) { - start = start || 0 - end = typeof end !== 'number' ? this.length : end - - if (start < 0) - start += this.length - if (end < 0) - end += this.length - - if (start === end) { - return new BufferList() - } - var startOffset = this._offset(start) - , endOffset = this._offset(end) - , buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1) - - if (endOffset[1] == 0) - buffers.pop() - else - buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1]) - - if (startOffset[1] != 0) - buffers[0] = buffers[0].slice(startOffset[1]) - - return new BufferList(buffers) -} - -BufferList.prototype.toString = function toString (encoding, start, end) { - return this.slice(start, end).toString(encoding) -} - -BufferList.prototype.consume = function consume (bytes) { - // first, normalize the argument, in accordance with how Buffer does it - bytes = Math.trunc(bytes) - // do nothing if not a positive number - if (Number.isNaN(bytes) || bytes <= 0) return this - - while (this._bufs.length) { - if (bytes >= this._bufs[0].length) { - bytes -= this._bufs[0].length - this.length -= this._bufs[0].length - this._bufs.shift() - } else { - this._bufs[0] = this._bufs[0].slice(bytes) - this.length -= bytes - break - } - } - return this -} - - -BufferList.prototype.duplicate = function duplicate () { - var i = 0 - , copy = new BufferList() - - for (; i < this._bufs.length; i++) - copy.append(this._bufs[i]) - - return copy -} - - -BufferList.prototype._destroy = function _destroy (err, cb) { - this._bufs.length = 0 - this.length = 0 - cb(err) -} - - -BufferList.prototype.indexOf = function (search, offset, encoding) { - if (encoding === undefined && typeof offset === 'string') { - encoding = offset - offset = undefined - } - if (typeof search === 'function' || Array.isArray(search)) { - throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.') - } else if (typeof search === 'number') { - search = Buffer.from([search]) - } else if (typeof search === 'string') { - search = Buffer.from(search, encoding) - } else if (search instanceof BufferList) { - search = search.slice() - } else if 
(!Buffer.isBuffer(search)) { - search = Buffer.from(search) - } - - offset = Number(offset || 0) - if (isNaN(offset)) { - offset = 0 - } - - if (offset < 0) { - offset = this.length + offset - } - - if (offset < 0) { - offset = 0 - } - - if (search.length === 0) { - return offset > this.length ? this.length : offset - } - - var blOffset = this._offset(offset) - var blIndex = blOffset[0] // index of which internal buffer we're working on - var buffOffset = blOffset[1] // offset of the internal buffer we're working on - - // scan over each buffer - for (blIndex; blIndex < this._bufs.length; blIndex++) { - var buff = this._bufs[blIndex] - while(buffOffset < buff.length) { - var availableWindow = buff.length - buffOffset - if (availableWindow >= search.length) { - var nativeSearchResult = buff.indexOf(search, buffOffset) - if (nativeSearchResult !== -1) { - return this._reverseOffset([blIndex, nativeSearchResult]) - } - buffOffset = buff.length - search.length + 1 // end of native search window - } else { - var revOffset = this._reverseOffset([blIndex, buffOffset]) - if (this._match(revOffset, search)) { - return revOffset - } - buffOffset++ - } - } - buffOffset = 0 - } - return -1 -} - -BufferList.prototype._match = function(offset, search) { - if (this.length - offset < search.length) { - return false - } - for (var searchOffset = 0; searchOffset < search.length ; searchOffset++) { - if(this.get(offset + searchOffset) !== search[searchOffset]){ - return false - } - } - return true -} - - -;(function () { - var methods = { - 'readDoubleBE' : 8 - , 'readDoubleLE' : 8 - , 'readFloatBE' : 4 - , 'readFloatLE' : 4 - , 'readInt32BE' : 4 - , 'readInt32LE' : 4 - , 'readUInt32BE' : 4 - , 'readUInt32LE' : 4 - , 'readInt16BE' : 2 - , 'readInt16LE' : 2 - , 'readUInt16BE' : 2 - , 'readUInt16LE' : 2 - , 'readInt8' : 1 - , 'readUInt8' : 1 - , 'readIntBE' : null - , 'readIntLE' : null - , 'readUIntBE' : null - , 'readUIntLE' : null - } - - for (var m in methods) { - (function (m) { - if (methods[m] === null) { - BufferList.prototype[m] = function (offset, byteLength) { - return this.slice(offset, offset + byteLength)[m](0, byteLength) - } - } - else { - BufferList.prototype[m] = function (offset) { - return this.slice(offset, offset + methods[m])[m](0) - } - } - }(m)) - } -}()) - - -module.exports = BufferList - - -/***/ }), - -/***/ 97713: -/***/ ((module) => { - -"use strict"; - - -const codes = {}; - -function createErrorType(code, message, Base) { - if (!Base) { - Base = Error - } - - function getMessage (arg1, arg2, arg3) { - if (typeof message === 'string') { - return message - } else { - return message(arg1, arg2, arg3) - } - } - - class NodeError extends Base { - constructor (arg1, arg2, arg3) { - super(getMessage(arg1, arg2, arg3)); - } - } - - NodeError.prototype.name = Base.name; - NodeError.prototype.code = code; - - codes[code] = NodeError; -} - -// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js -function oneOf(expected, thing) { - if (Array.isArray(expected)) { - const len = expected.length; - expected = expected.map((i) => String(i)); - if (len > 2) { - return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + - expected[len - 1]; - } else if (len === 2) { - return `one of ${thing} ${expected[0]} or ${expected[1]}`; - } else { - return `of ${thing} ${expected[0]}`; - } - } else { - return `of ${thing} ${String(expected)}`; - } -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith -function startsWith(str, 
search, pos) { - return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith -function endsWith(str, search, this_len) { - if (this_len === undefined || this_len > str.length) { - this_len = str.length; - } - return str.substring(this_len - search.length, this_len) === search; -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes -function includes(str, search, start) { - if (typeof start !== 'number') { - start = 0; - } - - if (start + search.length > str.length) { - return false; - } else { - return str.indexOf(search, start) !== -1; - } -} - -createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { - return 'The value "' + value + '" is invalid for option "' + name + '"' -}, TypeError); -createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { - // determiner: 'must be' or 'must not be' - let determiner; - if (typeof expected === 'string' && startsWith(expected, 'not ')) { - determiner = 'must not be'; - expected = expected.replace(/^not /, ''); - } else { - determiner = 'must be'; - } - - let msg; - if (endsWith(name, ' argument')) { - // For cases like 'first argument' - msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; - } else { - const type = includes(name, '.') ? 'property' : 'argument'; - msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; - } - - msg += `. Received type ${typeof actual}`; - return msg; -}, TypeError); -createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); -createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { - return 'The ' + name + ' method is not implemented' -}); -createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); -createErrorType('ERR_STREAM_DESTROYED', function (name) { - return 'Cannot call ' + name + ' after a stream was destroyed'; -}); -createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); -createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); -createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); -createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { - return 'Unknown encoding: ' + arg -}, TypeError); -createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); - -module.exports.q = codes; - - -/***/ }), - -/***/ 13928: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. - -/**/ - -var objectKeys = Object.keys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -module.exports = Duplex; - -var Readable = __nccwpck_require__(85209); - -var Writable = __nccwpck_require__(58729); - -__nccwpck_require__(44124)(Duplex, Readable); - -{ - // Allow the keys array to be GC'ed. - var keys = objectKeys(Writable.prototype); - - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; - } -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - Readable.call(this, options); - Writable.call(this, options); - this.allowHalfOpen = true; - - if (options) { - if (options.readable === false) this.readable = false; - if (options.writable === false) this.writable = false; - - if (options.allowHalfOpen === false) { - this.allowHalfOpen = false; - this.once('end', onend); - } - } -} - -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; - } -}); -Object.defineProperty(Duplex.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); -Object.defineProperty(Duplex.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.length; - } -}); // the no-half-open enforcer - -function onend() { - // If the writable side ended, then we're ok. - if (this._writableState.ended) return; // no more data can be written. - // But allow more writes to happen in this tick. 
- - process.nextTick(onEndNT, this); -} - -function onEndNT(self) { - self.end(); -} - -Object.defineProperty(Duplex.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined || this._writableState === undefined) { - return false; - } - - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed - - - this._readableState.destroyed = value; - this._writableState.destroyed = value; - } -}); - -/***/ }), - -/***/ 4991: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. - - -module.exports = PassThrough; - -var Transform = __nccwpck_require__(26753); - -__nccwpck_require__(44124)(PassThrough, Transform); - -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - Transform.call(this, options); -} - -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; - -/***/ }), - -/***/ 85209: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - - -module.exports = Readable; -/**/ - -var Duplex; -/**/ - -Readable.ReadableState = ReadableState; -/**/ - -var EE = __nccwpck_require__(28614).EventEmitter; - -var EElistenerCount = function EElistenerCount(emitter, type) { - return emitter.listeners(type).length; -}; -/**/ - -/**/ - - -var Stream = __nccwpck_require__(36238); -/**/ - - -var Buffer = __nccwpck_require__(64293).Buffer; - -var OurUint8Array = global.Uint8Array || function () {}; - -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} - -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} -/**/ - - -var debugUtil = __nccwpck_require__(31669); - -var debug; - -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function debug() {}; -} -/**/ - - -var BufferList = __nccwpck_require__(70662); - -var destroyImpl = __nccwpck_require__(36994); - -var _require = __nccwpck_require__(3533), - getHighWaterMark = _require.getHighWaterMark; - -var _require$codes = __nccwpck_require__(97713)/* .codes */ .q, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. - - -var StringDecoder; -var createReadableStreamAsyncIterator; -var from; - -__nccwpck_require__(44124)(Readable, Stream); - -var errorOrDestroy = destroyImpl.errorOrDestroy; -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; - -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; -} - -function ReadableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(13928); - options = options || {}; // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. 
Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - - this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - - this.sync = true; // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - this.paused = true; // Should close be emitted on destroy. Defaults to true. - - this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') - - this.autoDestroy = !!options.autoDestroy; // has it been destroyed - - this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. 
- - this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s - - this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled - - this.readingMore = false; - this.decoder = null; - this.encoding = null; - - if (options.encoding) { - if (!StringDecoder) StringDecoder = __nccwpck_require__(94841)/* .StringDecoder */ .s; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -function Readable(options) { - Duplex = Duplex || __nccwpck_require__(13928); - if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5 - - var isDuplex = this instanceof Duplex; - this._readableState = new ReadableState(options, this, isDuplex); // legacy - - this.readable = true; - - if (options) { - if (typeof options.read === 'function') this._read = options.read; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } - - Stream.call(this); -} - -Object.defineProperty(Readable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined) { - return false; - } - - return this._readableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed - - - this._readableState.destroyed = value; - } -}); -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; - -Readable.prototype._destroy = function (err, cb) { - cb(err); -}; // Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. 
- - -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; - - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } - - skipChunkCheck = true; - } - } else { - skipChunkCheck = true; - } - - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; // Unshift should *always* be something directly out of read() - - -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; - -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - debug('readableAddChunk', chunk); - var state = stream._readableState; - - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - - if (er) { - errorOrDestroy(stream, er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - - if (addToFront) { - if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); - } else if (state.destroyed) { - return false; - } else { - state.reading = false; - - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - maybeReadMore(stream, state); - } - } // We can push more data if we are below the highWaterMark. - // Also, if we have no data yet, we can stand some more bytes. - // This is to work around cases where hwm=0, such as the repl. - - - return !state.ended && (state.length < state.highWaterMark || state.length === 0); -} - -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - state.awaitDrain = 0; - stream.emit('data', chunk); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream); - } - - maybeReadMore(stream, state); -} - -function chunkInvalid(state, chunk) { - var er; - - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); - } - - return er; -} - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; // backwards compatibility. 
- - -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = __nccwpck_require__(94841)/* .StringDecoder */ .s; - var decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 - - this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: - - var p = this._readableState.buffer.head; - var content = ''; - - while (p !== null) { - content += decoder.write(p.data); - p = p.next; - } - - this._readableState.buffer.clear(); - - if (content !== '') this._readableState.buffer.push(content); - this._readableState.length = content.length; - return this; -}; // Don't raise the hwm > 1GB - - -var MAX_HWM = 0x40000000; - -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - - return n; -} // This function is designed to be inlinable, so please take care when making -// changes to the function body. - - -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } // If we're asking for more than the current hwm, then raise the hwm. - - - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; // Don't have enough - - if (!state.ended) { - state.needReadable = true; - return 0; - } - - return state.length; -} // you can override either this method, or the async _read(n) below. - - -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - - if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. - - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. 
Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - // if we need a readable event, then we need to do some reading. - - - var doRead = state.needReadable; - debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some - - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - - - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; // if the length is currently zero, then we *need* a readable event. - - if (state.length === 0) state.needReadable = true; // call internal read method - - this._read(state.highWaterMark); - - state.sync = false; // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - - if (!state.reading) n = howMuchToRead(nOrig, state); - } - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark; - n = 0; - } else { - state.length -= n; - state.awaitDrain = 0; - } - - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. - - if (nOrig !== n && state.ended) endReadable(this); - } - - if (ret !== null) this.emit('data', ret); - return ret; -}; - -function onEofChunk(stream, state) { - debug('onEofChunk'); - if (state.ended) return; - - if (state.decoder) { - var chunk = state.decoder.end(); - - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - - state.ended = true; - - if (state.sync) { - // if we are sync, wait until next tick to emit the data. - // Otherwise we risk emitting data in the flow() - // the readable code triggers during a read() call - emitReadable(stream); - } else { - // emit 'readable' now to make sure it gets picked up. - state.needReadable = false; - - if (!state.emittedReadable) { - state.emittedReadable = true; - emitReadable_(stream); - } - } -} // Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. 
- - -function emitReadable(stream) { - var state = stream._readableState; - debug('emitReadable', state.needReadable, state.emittedReadable); - state.needReadable = false; - - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - process.nextTick(emitReadable_, stream); - } -} - -function emitReadable_(stream) { - var state = stream._readableState; - debug('emitReadable_', state.destroyed, state.length, state.ended); - - if (!state.destroyed && (state.length || state.ended)) { - stream.emit('readable'); - state.emittedReadable = false; - } // The stream needs another readable event if - // 1. It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. - - - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; - flow(stream); -} // at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. - - -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - process.nextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { - var len = state.length; - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) // didn't get any data, stop spinning. - break; - } - - state.readingMore = false; -} // abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. 
- - -Readable.prototype._read = function (n) { - errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - - case 1: - state.pipes = [state.pipes, dest]; - break; - - default: - state.pipes.push(dest); - break; - } - - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); - dest.on('unpipe', onunpipe); - - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } - - function onend() { - debug('onend'); - dest.end(); - } // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - - - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - var cleanedUp = false; - - function cleanup() { - debug('cleanup'); // cleanup event handlers once the pipe is broken - - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - cleanedUp = true; // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - src.on('data', ondata); - - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - debug('dest.write', ret); - - if (ret === false) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; - } - - src.pause(); - } - } // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - - - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); - } // Make sure our error handler is attached before userland ones. - - - prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. 
- - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - - dest.once('close', onclose); - - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } // tell the dest that it's being piped to - - - dest.emit('pipe', src); // start the flow if it hasn't been started already. - - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function pipeOnDrainFunctionResult() { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { - hasUnpiped: false - }; // if we're not piping anywhere, then do nothing. - - if (state.pipesCount === 0) return this; // just one destination. most common case. - - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - if (!dest) dest = state.pipes; // got a match. - - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; - } // slow case. multiple pipe destinations. - - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, { - hasUnpiped: false - }); - } - - return this; - } // try to find the right one. - - - var index = indexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit('unpipe', this, unpipeInfo); - return this; -}; // set up data events if they are asked for -// Ensure readable listeners eventually get something - - -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - var state = this._readableState; - - if (ev === 'data') { - // update readableListening so that resume() may be a no-op - // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused - - if (state.flowing !== false) this.resume(); - } else if (ev === 'readable') { - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.flowing = false; - state.emittedReadable = false; - debug('on readable', state.length, state.reading); - - if (state.length) { - emitReadable(this); - } else if (!state.reading) { - process.nextTick(nReadingNextTick, this); - } - } - } - - return res; -}; - -Readable.prototype.addListener = Readable.prototype.on; - -Readable.prototype.removeListener = function (ev, fn) { - var res = Stream.prototype.removeListener.call(this, ev, fn); - - if (ev === 'readable') { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. 
This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } - - return res; -}; - -Readable.prototype.removeAllListeners = function (ev) { - var res = Stream.prototype.removeAllListeners.apply(this, arguments); - - if (ev === 'readable' || ev === undefined) { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } - - return res; -}; - -function updateReadableListening(self) { - var state = self._readableState; - state.readableListening = self.listenerCount('readable') > 0; - - if (state.resumeScheduled && !state.paused) { - // flowing needs to be set to true now, otherwise - // the upcoming resume will not flow. - state.flowing = true; // crude way to check if we should resume - } else if (self.listenerCount('data') > 0) { - self.resume(); - } -} - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} // pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. - - -Readable.prototype.resume = function () { - var state = this._readableState; - - if (!state.flowing) { - debug('resume'); // we flow only if there is no one listening - // for readable, but we still have to call - // resume() - - state.flowing = !state.readableListening; - resume(this, state); - } - - state.paused = false; - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - process.nextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - debug('resume', state.reading); - - if (!state.reading) { - stream.read(0); - } - - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - - if (this._readableState.flowing !== false) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - - this._readableState.paused = true; - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - - while (state.flowing && stream.read() !== null) { - ; - } -} // wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. - - -Readable.prototype.wrap = function (stream) { - var _this = this; - - var state = this._readableState; - var paused = false; - stream.on('end', function () { - debug('wrapped end'); - - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } - - _this.push(null); - }); - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode - - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = _this.push(chunk); - - if (!ret) { - paused = true; - stream.pause(); - } - }); // proxy all the other methods. 
- // important when wrapping filters and duplexes. - - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function methodWrap(method) { - return function methodWrapReturnFunction() { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } // proxy certain important events. - - - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } // when we try to consume some more bytes, simply unpause the - // underlying stream. - - - this._read = function (n) { - debug('wrapped _read', n); - - if (paused) { - paused = false; - stream.resume(); - } - }; - - return this; -}; - -if (typeof Symbol === 'function') { - Readable.prototype[Symbol.asyncIterator] = function () { - if (createReadableStreamAsyncIterator === undefined) { - createReadableStreamAsyncIterator = __nccwpck_require__(37558); - } - - return createReadableStreamAsyncIterator(this); - }; -} - -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.highWaterMark; - } -}); -Object.defineProperty(Readable.prototype, 'readableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState && this._readableState.buffer; - } -}); -Object.defineProperty(Readable.prototype, 'readableFlowing', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.flowing; - }, - set: function set(state) { - if (this._readableState) { - this._readableState.flowing = state; - } - } -}); // exposed for testing purposes only. - -Readable._fromList = fromList; -Object.defineProperty(Readable.prototype, 'readableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.length; - } -}); // Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. - -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = state.buffer.consume(n, state.decoder); - } - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - debug('endReadable', state.endEmitted); - - if (!state.endEmitted) { - state.ended = true; - process.nextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. 
- - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the writable side is ready for autoDestroy as well - var wState = stream._writableState; - - if (!wState || wState.autoDestroy && wState.finished) { - stream.destroy(); - } - } - } -} - -if (typeof Symbol === 'function') { - Readable.from = function (iterable, opts) { - if (from === undefined) { - from = __nccwpck_require__(57039); - } - - return from(Readable, iterable, opts); - }; -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - - return -1; -} - -/***/ }), - -/***/ 26753: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. 
However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. - - -module.exports = Transform; - -var _require$codes = __nccwpck_require__(97713)/* .codes */ .q, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, - ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; - -var Duplex = __nccwpck_require__(13928); - -__nccwpck_require__(44124)(Transform, Duplex); - -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; - var cb = ts.writecb; - - if (cb === null) { - return this.emit('error', new ERR_MULTIPLE_CALLBACK()); - } - - ts.writechunk = null; - ts.writecb = null; - if (data != null) // single equals check for both `null` and `undefined` - this.push(data); - cb(er); - var rs = this._readableState; - rs.reading = false; - - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - Duplex.call(this, options); - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; // start out asking for a readable event once data is transformed. - - this._readableState.needReadable = true; // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. - - this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - if (typeof options.flush === 'function') this._flush = options.flush; - } // When the writable side finishes, then flush out anything remaining. - - - this.on('prefinish', prefinish); -} - -function prefinish() { - var _this = this; - - if (typeof this._flush === 'function' && !this._readableState.destroyed) { - this._flush(function (er, data) { - done(_this, er, data); - }); - } else { - done(this, null, null); - } -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; // This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. 
If you -// never call cb(), then you'll never get another chunk. - - -Transform.prototype._transform = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; // Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. - - -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && !ts.transforming) { - ts.transforming = true; - - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; - -Transform.prototype._destroy = function (err, cb) { - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - }); -}; - -function done(stream, er, data) { - if (er) return stream.emit('error', er); - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); // TODO(BridgeAR): Write a test for these two error cases - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - - if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); - if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); - return stream.push(null); -} - -/***/ }), - -/***/ 58729: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. 
- - -module.exports = Writable; -/* */ - -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} // It seems a linked list but it is not -// there will be only 2 of these for each stream - - -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - - this.finish = function () { - onCorkedFinish(_this, state); - }; -} -/* */ - -/**/ - - -var Duplex; -/**/ - -Writable.WritableState = WritableState; -/**/ - -var internalUtil = { - deprecate: __nccwpck_require__(65278) -}; -/**/ - -/**/ - -var Stream = __nccwpck_require__(36238); -/**/ - - -var Buffer = __nccwpck_require__(64293).Buffer; - -var OurUint8Array = global.Uint8Array || function () {}; - -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} - -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} - -var destroyImpl = __nccwpck_require__(36994); - -var _require = __nccwpck_require__(3533), - getHighWaterMark = _require.getHighWaterMark; - -var _require$codes = __nccwpck_require__(97713)/* .codes */ .q, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, - ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; - -var errorOrDestroy = destroyImpl.errorOrDestroy; - -__nccwpck_require__(44124)(Writable, Stream); - -function nop() {} - -function WritableState(options, stream, isDuplex) { - Duplex = Duplex || __nccwpck_require__(13928); - options = options || {}; // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream, - // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. - - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream - // contains buffers or objects. - - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - - this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called - - this.finalCalled = false; // drain event flag. - - this.needDrain = false; // at the start of calling end() - - this.ending = false; // when end() has been called, and returned - - this.ended = false; // when 'finish' is emitted - - this.finished = false; // has it been destroyed - - this.destroyed = false; // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. 
- // Everything else in the universe uses 'utf8', though. - - this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - - this.length = 0; // a flag to see when we're in the middle of a write. - - this.writing = false; // when true all writes will be buffered until .uncork() call - - this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - - this.sync = true; // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - - this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) - - this.onwrite = function (er) { - onwrite(stream, er); - }; // the callback that the user supplies to write(chunk,encoding,cb) - - - this.writecb = null; // the amount that is being written when _write is called. - - this.writelen = 0; - this.bufferedRequest = null; - this.lastBufferedRequest = null; // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - - this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - - this.prefinished = false; // True if the error was already emitted and should not be thrown again - - this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. - - this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') - - this.autoDestroy = !!options.autoDestroy; // count buffered requests - - this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - - this.corkedRequestsFree = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - - while (current) { - out.push(current); - current = current.next; - } - - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function writableStateBufferGetter() { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); // Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. - - -var realHasInstance; - -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function value(object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; - } - }); -} else { - realHasInstance = function realHasInstance(object) { - return object instanceof this; - }; -} - -function Writable(options) { - Duplex = Duplex || __nccwpck_require__(13928); // Writable ctor is applied to Duplexes, too. 
- // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - // Checking for a Stream.Duplex instance is faster here instead of inside - // the WritableState constructor, at least with V8 6.5 - - var isDuplex = this instanceof Duplex; - if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); - this._writableState = new WritableState(options, this, isDuplex); // legacy. - - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - if (typeof options.writev === 'function') this._writev = options.writev; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - if (typeof options.final === 'function') this._final = options.final; - } - - Stream.call(this); -} // Otherwise people can pipe Writable streams, which is just wrong. - - -Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); -}; - -function writeAfterEnd(stream, cb) { - var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb - - errorOrDestroy(stream, er); - process.nextTick(cb, er); -} // Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. - - -function validChunk(stream, state, chunk, cb) { - var er; - - if (chunk === null) { - er = new ERR_STREAM_NULL_VALUES(); - } else if (typeof chunk !== 'string' && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); - } - - if (er) { - errorOrDestroy(stream, er); - process.nextTick(cb, er); - return false; - } - - return true; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - var isBuf = !state.objectMode && _isUint8Array(chunk); - - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); - } - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - if (typeof cb !== 'function') cb = nop; - if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); - } - return ret; -}; - -Writable.prototype.cork = function () { - this._writableState.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. 
- if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; - -Object.defineProperty(Writable.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); - } - - return chunk; -} - -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; - } -}); // if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. - -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } - } - - var len = state.objectMode ? 1 : chunk.length; - state.length += len; - var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. 
- - if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; - - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - process.nextTick(cb, er); // this can emit finish, and it will always happen - // after error - - process.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); // this can emit finish, but finish must - // always follow error - - finishMaybe(stream, state); - } -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); - onwriteStateUpdate(state); - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state) || stream.destroyed; - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - process.nextTick(afterWrite, stream, state, finished, cb); - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} // Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. 
- - -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} // if there's something in the buffer waiting, then process it - - -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - var count = 0; - var allBuffers = true; - - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - - buffer.allBuffers = allBuffers; - doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - - state.pendingcb++; - state.lastBufferedRequest = null; - - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - - if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks - - if (state.corked) { - state.corked = 1; - this.uncork(); - } // ignore unnecessary end() calls. 
- - - if (!state.ending) endWritable(this, state, cb); - return this; -}; - -Object.defineProperty(Writable.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.length; - } -}); - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} - -function callFinal(stream, state) { - stream._final(function (err) { - state.pendingcb--; - - if (err) { - errorOrDestroy(stream, err); - } - - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); -} - -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function' && !state.destroyed) { - state.pendingcb++; - state.finalCalled = true; - process.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); - } - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - - if (need) { - prefinish(stream, state); - - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the readable side is ready for autoDestroy as well - var rState = stream._readableState; - - if (!rState || rState.autoDestroy && rState.endEmitted) { - stream.destroy(); - } - } - } - } - - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - - if (cb) { - if (state.finished) process.nextTick(cb);else stream.once('finish', cb); - } - - state.ended = true; - stream.writable = false; -} - -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } // reuse the free corkReq. 
- - - state.corkedRequestsFree.next = corkReq; -} - -Object.defineProperty(Writable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._writableState === undefined) { - return false; - } - - return this._writableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed - - - this._writableState.destroyed = value; - } -}); -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; - -Writable.prototype._destroy = function (err, cb) { - cb(err); -}; - -/***/ }), - -/***/ 37558: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var _Object$setPrototypeO; - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -var finished = __nccwpck_require__(12659); - -var kLastResolve = Symbol('lastResolve'); -var kLastReject = Symbol('lastReject'); -var kError = Symbol('error'); -var kEnded = Symbol('ended'); -var kLastPromise = Symbol('lastPromise'); -var kHandlePromise = Symbol('handlePromise'); -var kStream = Symbol('stream'); - -function createIterResult(value, done) { - return { - value: value, - done: done - }; -} - -function readAndResolve(iter) { - var resolve = iter[kLastResolve]; - - if (resolve !== null) { - var data = iter[kStream].read(); // we defer if data is null - // we can be expecting either 'end' or - // 'error' - - if (data !== null) { - iter[kLastPromise] = null; - iter[kLastResolve] = null; - iter[kLastReject] = null; - resolve(createIterResult(data, false)); - } - } -} - -function onReadable(iter) { - // we wait for the next tick, because it might - // emit an error with process.nextTick - process.nextTick(readAndResolve, iter); -} - -function wrapForNext(lastPromise, iter) { - return function (resolve, reject) { - lastPromise.then(function () { - if (iter[kEnded]) { - resolve(createIterResult(undefined, true)); - return; - } - - iter[kHandlePromise](resolve, reject); - }, reject); - }; -} - -var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); -var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { - get stream() { - return this[kStream]; - }, - - next: function next() { - var _this = this; - - // if we have detected an error in the meanwhile - // reject straight away - var error = this[kError]; - - if (error !== null) { - return Promise.reject(error); - } - - if (this[kEnded]) { - return Promise.resolve(createIterResult(undefined, true)); - } - - if (this[kStream].destroyed) { - // We need to defer via nextTick because if .destroy(err) is - // called, the error will be emitted via nextTick, and - // we cannot guarantee that there is no error lingering around - // waiting to be emitted. 
- return new Promise(function (resolve, reject) { - process.nextTick(function () { - if (_this[kError]) { - reject(_this[kError]); - } else { - resolve(createIterResult(undefined, true)); - } - }); - }); - } // if we have multiple next() calls - // we will wait for the previous Promise to finish - // this logic is optimized to support for await loops, - // where next() is only called once at a time - - - var lastPromise = this[kLastPromise]; - var promise; - - if (lastPromise) { - promise = new Promise(wrapForNext(lastPromise, this)); - } else { - // fast path needed to support multiple this.push() - // without triggering the next() queue - var data = this[kStream].read(); - - if (data !== null) { - return Promise.resolve(createIterResult(data, false)); - } - - promise = new Promise(this[kHandlePromise]); - } - - this[kLastPromise] = promise; - return promise; - } -}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { - return this; -}), _defineProperty(_Object$setPrototypeO, "return", function _return() { - var _this2 = this; - - // destroy(err, cb) is a private API - // we can guarantee we have that here, because we control the - // Readable class this is attached to - return new Promise(function (resolve, reject) { - _this2[kStream].destroy(null, function (err) { - if (err) { - reject(err); - return; - } - - resolve(createIterResult(undefined, true)); - }); - }); -}), _Object$setPrototypeO), AsyncIteratorPrototype); - -var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { - var _Object$create; - - var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { - value: stream, - writable: true - }), _defineProperty(_Object$create, kLastResolve, { - value: null, - writable: true - }), _defineProperty(_Object$create, kLastReject, { - value: null, - writable: true - }), _defineProperty(_Object$create, kError, { - value: null, - writable: true - }), _defineProperty(_Object$create, kEnded, { - value: stream._readableState.endEmitted, - writable: true - }), _defineProperty(_Object$create, kHandlePromise, { - value: function value(resolve, reject) { - var data = iterator[kStream].read(); - - if (data) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(data, false)); - } else { - iterator[kLastResolve] = resolve; - iterator[kLastReject] = reject; - } - }, - writable: true - }), _Object$create)); - iterator[kLastPromise] = null; - finished(stream, function (err) { - if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise - // returned by next() and store the error - - if (reject !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - reject(err); - } - - iterator[kError] = err; - return; - } - - var resolve = iterator[kLastResolve]; - - if (resolve !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(undefined, true)); - } - - iterator[kEnded] = true; - }); - stream.on('readable', onReadable.bind(null, iterator)); - return iterator; -}; - -module.exports = createReadableStreamAsyncIterator; - -/***/ }), - -/***/ 70662: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -function ownKeys(object, enumerableOnly) { var keys = 
Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -var _require = __nccwpck_require__(64293), - Buffer = _require.Buffer; - -var _require2 = __nccwpck_require__(31669), - inspect = _require2.inspect; - -var custom = inspect && inspect.custom || 'inspect'; - -function copyBuffer(src, target, offset) { - Buffer.prototype.copy.call(src, target, offset); -} - -module.exports = -/*#__PURE__*/ -function () { - function BufferList() { - _classCallCheck(this, BufferList); - - this.head = null; - this.tail = null; - this.length = 0; - } - - _createClass(BufferList, [{ - key: "push", - value: function push(v) { - var entry = { - data: v, - next: null - }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - } - }, { - key: "unshift", - value: function unshift(v) { - var entry = { - data: v, - next: this.head - }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - } - }, { - key: "shift", - value: function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - } - }, { - key: "clear", - value: function clear() { - this.head = this.tail = null; - this.length = 0; - } - }, { - key: "join", - value: function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - - while (p = p.next) { - ret += s + p.data; - } - - return ret; - } - }, { - key: "concat", - value: function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - - return ret; - } // Consumes a specified amount of bytes or characters from the buffered data. 
- - }, { - key: "consume", - value: function consume(n, hasStrings) { - var ret; - - if (n < this.head.data.length) { - // `slice` is the same for buffers and strings. - ret = this.head.data.slice(0, n); - this.head.data = this.head.data.slice(n); - } else if (n === this.head.data.length) { - // First chunk is a perfect match. - ret = this.shift(); - } else { - // Result spans more than one buffer. - ret = hasStrings ? this._getString(n) : this._getBuffer(n); - } - - return ret; - } - }, { - key: "first", - value: function first() { - return this.head.data; - } // Consumes a specified amount of characters from the buffered data. - - }, { - key: "_getString", - value: function _getString(n) { - var p = this.head; - var c = 1; - var ret = p.data; - n -= ret.length; - - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = str.slice(nb); - } - - break; - } - - ++c; - } - - this.length -= c; - return ret; - } // Consumes a specified amount of bytes from the buffered data. - - }, { - key: "_getBuffer", - value: function _getBuffer(n) { - var ret = Buffer.allocUnsafe(n); - var p = this.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = buf.slice(nb); - } - - break; - } - - ++c; - } - - this.length -= c; - return ret; - } // Make sure the linked list only shows the minimal necessary information. - - }, { - key: custom, - value: function value(_, options) { - return inspect(this, _objectSpread({}, options, { - // Only inspect one level. - depth: 0, - // It should not recurse. 
- customInspect: false - })); - } - }]); - - return BufferList; -}(); - -/***/ }), - -/***/ 36994: -/***/ ((module) => { - -"use strict"; - // undocumented cb() API, needed for core, not for public API - -function destroy(err, cb) { - var _this = this; - - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; - - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - process.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - process.nextTick(emitErrorNT, this, err); - } - } - - return this; - } // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks - - - if (this._readableState) { - this._readableState.destroyed = true; - } // if this is a duplex stream mark the writable part as destroyed as well - - - if (this._writableState) { - this._writableState.destroyed = true; - } - - this._destroy(err || null, function (err) { - if (!cb && err) { - if (!_this._writableState) { - process.nextTick(emitErrorAndCloseNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - process.nextTick(emitErrorAndCloseNT, _this, err); - } else { - process.nextTick(emitCloseNT, _this); - } - } else if (cb) { - process.nextTick(emitCloseNT, _this); - cb(err); - } else { - process.nextTick(emitCloseNT, _this); - } - }); - - return this; -} - -function emitErrorAndCloseNT(self, err) { - emitErrorNT(self, err); - emitCloseNT(self); -} - -function emitCloseNT(self) { - if (self._writableState && !self._writableState.emitClose) return; - if (self._readableState && !self._readableState.emitClose) return; - self.emit('close'); -} - -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; - } - - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; - } -} - -function emitErrorNT(self, err) { - self.emit('error', err); -} - -function errorOrDestroy(stream, err) { - // We have tests that rely on errors being emitted - // in the same tick, so changing this is semver major. - // For now when you opt-in to autoDestroy we allow - // the error to be emitted nextTick. In a future - // semver major update we should change the default to this. - var rState = stream._readableState; - var wState = stream._writableState; - if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); -} - -module.exports = { - destroy: destroy, - undestroy: undestroy, - errorOrDestroy: errorOrDestroy -}; - -/***/ }), - -/***/ 12659: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -// Ported from https://github.com/mafintosh/end-of-stream with -// permission from the author, Mathias Buus (@mafintosh). 
- - -var ERR_STREAM_PREMATURE_CLOSE = __nccwpck_require__(97713)/* .codes.ERR_STREAM_PREMATURE_CLOSE */ .q.ERR_STREAM_PREMATURE_CLOSE; - -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; - - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { - args[_key] = arguments[_key]; - } - - callback.apply(this, args); - }; -} - -function noop() {} - -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} - -function eos(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - callback = once(callback || noop); - var readable = opts.readable || opts.readable !== false && stream.readable; - var writable = opts.writable || opts.writable !== false && stream.writable; - - var onlegacyfinish = function onlegacyfinish() { - if (!stream.writable) onfinish(); - }; - - var writableEnded = stream._writableState && stream._writableState.finished; - - var onfinish = function onfinish() { - writable = false; - writableEnded = true; - if (!readable) callback.call(stream); - }; - - var readableEnded = stream._readableState && stream._readableState.endEmitted; - - var onend = function onend() { - readable = false; - readableEnded = true; - if (!writable) callback.call(stream); - }; - - var onerror = function onerror(err) { - callback.call(stream, err); - }; - - var onclose = function onclose() { - var err; - - if (readable && !readableEnded) { - if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - - if (writable && !writableEnded) { - if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - }; - - var onrequest = function onrequest() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest();else stream.on('request', onrequest); - } else if (writable && !stream._writableState) { - // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - return function () { - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -} - -module.exports = eos; - -/***/ }), - -/***/ 57039: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, 
resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -var ERR_INVALID_ARG_TYPE = __nccwpck_require__(97713)/* .codes.ERR_INVALID_ARG_TYPE */ .q.ERR_INVALID_ARG_TYPE; - -function from(Readable, iterable, opts) { - var iterator; - - if (iterable && typeof iterable.next === 'function') { - iterator = iterable; - } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); - - var readable = new Readable(_objectSpread({ - objectMode: true - }, opts)); // Reading boolean to protect against _read - // being called before last iteration completion. - - var reading = false; - - readable._read = function () { - if (!reading) { - reading = true; - next(); - } - }; - - function next() { - return _next2.apply(this, arguments); - } - - function _next2() { - _next2 = _asyncToGenerator(function* () { - try { - var _ref = yield iterator.next(), - value = _ref.value, - done = _ref.done; - - if (done) { - readable.push(null); - } else if (readable.push((yield value))) { - next(); - } else { - reading = false; - } - } catch (err) { - readable.destroy(err); - } - }); - return _next2.apply(this, arguments); - } - - return readable; -} - -module.exports = from; - -/***/ }), - -/***/ 20740: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -// Ported from https://github.com/mafintosh/pump with -// permission from the author, Mathias Buus (@mafintosh). 
- - -var eos; - -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; - callback.apply(void 0, arguments); - }; -} - -var _require$codes = __nccwpck_require__(97713)/* .codes */ .q, - ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; - -function noop(err) { - // Rethrow the error if it exists to avoid swallowing it - if (err) throw err; -} - -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} - -function destroyer(stream, reading, writing, callback) { - callback = once(callback); - var closed = false; - stream.on('close', function () { - closed = true; - }); - if (eos === undefined) eos = __nccwpck_require__(12659); - eos(stream, { - readable: reading, - writable: writing - }, function (err) { - if (err) return callback(err); - closed = true; - callback(); - }); - var destroyed = false; - return function (err) { - if (closed) return; - if (destroyed) return; - destroyed = true; // request.destroy just do .end - .abort is what we want - - if (isRequest(stream)) return stream.abort(); - if (typeof stream.destroy === 'function') return stream.destroy(); - callback(err || new ERR_STREAM_DESTROYED('pipe')); - }; -} - -function call(fn) { - fn(); -} - -function pipe(from, to) { - return from.pipe(to); -} - -function popCallback(streams) { - if (!streams.length) return noop; - if (typeof streams[streams.length - 1] !== 'function') return noop; - return streams.pop(); -} - -function pipeline() { - for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { - streams[_key] = arguments[_key]; - } - - var callback = popCallback(streams); - if (Array.isArray(streams[0])) streams = streams[0]; - - if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams'); - } - - var error; - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1; - var writing = i > 0; - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err; - if (err) destroys.forEach(call); - if (reading) return; - destroys.forEach(call); - callback(error); - }); - }); - return streams.reduce(pipe); -} - -module.exports = pipeline; - -/***/ }), - -/***/ 3533: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -var ERR_INVALID_OPT_VALUE = __nccwpck_require__(97713)/* .codes.ERR_INVALID_OPT_VALUE */ .q.ERR_INVALID_OPT_VALUE; - -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; -} - -function getHighWaterMark(state, options, duplexKey, isDuplex) { - var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); - - if (hwm != null) { - if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { - var name = isDuplex ? duplexKey : 'highWaterMark'; - throw new ERR_INVALID_OPT_VALUE(name, hwm); - } - - return Math.floor(hwm); - } // Default value - - - return state.objectMode ? 
16 : 16 * 1024; -} - -module.exports = { - getHighWaterMark: getHighWaterMark -}; - -/***/ }), - -/***/ 36238: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = __nccwpck_require__(92413); - - -/***/ }), - -/***/ 85519: -/***/ ((module, exports, __nccwpck_require__) => { - -var Stream = __nccwpck_require__(92413); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream.Readable; - Object.assign(module.exports, Stream); - module.exports.Stream = Stream; -} else { - exports = module.exports = __nccwpck_require__(85209); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = __nccwpck_require__(58729); - exports.Duplex = __nccwpck_require__(13928); - exports.Transform = __nccwpck_require__(26753); - exports.PassThrough = __nccwpck_require__(4991); - exports.finished = __nccwpck_require__(12659); - exports.pipeline = __nccwpck_require__(20740); -} - - -/***/ }), - -/***/ 33717: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var concatMap = __nccwpck_require__(86891); -var balanced = __nccwpck_require__(9417); - -module.exports = expandTop; - -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; - -function numeric(str) { - return parseInt(str, 10) == str - ? parseInt(str, 10) - : str.charCodeAt(0); -} - -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} - -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} - - -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; - - var parts = []; - var m = balanced('{', '}', str); - - if (!m) - return str.split(','); - - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); - - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } - - parts.push.apply(parts, p); - - return parts; -} - -function expandTop(str) { - if (!str) - return []; - - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. 
- // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); - } - - return expand(escapeBraces(str), true).map(unescapeBraces); -} - -function identity(e) { - return e; -} - -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} - -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; -} - -function expand(str, isTop) { - var expansions = []; - - var m = balanced('{', '}', str); - if (!m || /\$$/.test(m.pre)) return [str]; - - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } - - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length - ? expand(m.post, false) - : ['']; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } - - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. - - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? expand(m.post, false) - : ['']; - - var N; - - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); - - N = []; - - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); - } - } else { - N = concatMap(n, function(el) { return expand(el, false) }); - } - - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } - } - - return expansions; -} - - - -/***/ }), - -/***/ 9239: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; -/*jshint node:true */ - -var Buffer = __nccwpck_require__(64293).Buffer; // browserify -var SlowBuffer = __nccwpck_require__(64293).SlowBuffer; - -module.exports = bufferEq; - -function bufferEq(a, b) { - - // shortcutting on type is necessary for correctness - if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { - return false; - } - - // buffer sizes should be well-known information, so despite this - // shortcutting, it doesn't leak any information about the *contents* of the - // buffers. 
- if (a.length !== b.length) { - return false; - } - - var c = 0; - for (var i = 0; i < a.length; i++) { - /*jshint bitwise:false */ - c |= a[i] ^ b[i]; // XOR - } - return c === 0; -} - -bufferEq.install = function() { - Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { - return bufferEq(this, that); - }; -}; - -var origBufEqual = Buffer.prototype.equal; -var origSlowBufEqual = SlowBuffer.prototype.equal; -bufferEq.restore = function() { - Buffer.prototype.equal = origBufEqual; - SlowBuffer.prototype.equal = origSlowBufEqual; -}; - - -/***/ }), - -/***/ 35684: -/***/ ((module) => { - -function Caseless (dict) { - this.dict = dict || {} -} -Caseless.prototype.set = function (name, value, clobber) { - if (typeof name === 'object') { - for (var i in name) { - this.set(i, name[i], value) - } - } else { - if (typeof clobber === 'undefined') clobber = true - var has = this.has(name) - - if (!clobber && has) this.dict[has] = this.dict[has] + ',' + value - else this.dict[has || name] = value - return has - } -} -Caseless.prototype.has = function (name) { - var keys = Object.keys(this.dict) - , name = name.toLowerCase() - ; - for (var i=0;i { - -"use strict"; - -const ansiStyles = __nccwpck_require__(52068); -const {stdout: stdoutColor, stderr: stderrColor} = __nccwpck_require__(59318); -const { - stringReplaceAll, - stringEncaseCRLFWithFirstIndex -} = __nccwpck_require__(82415); - -const {isArray} = Array; - -// `supportsColor.level` → `ansiStyles.color[name]` mapping -const levelMapping = [ - 'ansi', - 'ansi', - 'ansi256', - 'ansi16m' -]; - -const styles = Object.create(null); - -const applyOptions = (object, options = {}) => { - if (options.level && !(Number.isInteger(options.level) && options.level >= 0 && options.level <= 3)) { - throw new Error('The `level` option should be an integer from 0 to 3'); - } - - // Detect level if not set manually - const colorLevel = stdoutColor ? stdoutColor.level : 0; - object.level = options.level === undefined ? colorLevel : options.level; -}; - -class ChalkClass { - constructor(options) { - // eslint-disable-next-line no-constructor-return - return chalkFactory(options); - } -} - -const chalkFactory = options => { - const chalk = {}; - applyOptions(chalk, options); - - chalk.template = (...arguments_) => chalkTag(chalk.template, ...arguments_); - - Object.setPrototypeOf(chalk, Chalk.prototype); - Object.setPrototypeOf(chalk.template, chalk); - - chalk.template.constructor = () => { - throw new Error('`chalk.constructor()` is deprecated. 
Use `new chalk.Instance()` instead.'); - }; - - chalk.template.Instance = ChalkClass; - - return chalk.template; -}; - -function Chalk(options) { - return chalkFactory(options); -} - -for (const [styleName, style] of Object.entries(ansiStyles)) { - styles[styleName] = { - get() { - const builder = createBuilder(this, createStyler(style.open, style.close, this._styler), this._isEmpty); - Object.defineProperty(this, styleName, {value: builder}); - return builder; - } - }; -} - -styles.visible = { - get() { - const builder = createBuilder(this, this._styler, true); - Object.defineProperty(this, 'visible', {value: builder}); - return builder; - } -}; - -const usedModels = ['rgb', 'hex', 'keyword', 'hsl', 'hsv', 'hwb', 'ansi', 'ansi256']; - -for (const model of usedModels) { - styles[model] = { - get() { - const {level} = this; - return function (...arguments_) { - const styler = createStyler(ansiStyles.color[levelMapping[level]][model](...arguments_), ansiStyles.color.close, this._styler); - return createBuilder(this, styler, this._isEmpty); - }; - } - }; -} - -for (const model of usedModels) { - const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1); - styles[bgModel] = { - get() { - const {level} = this; - return function (...arguments_) { - const styler = createStyler(ansiStyles.bgColor[levelMapping[level]][model](...arguments_), ansiStyles.bgColor.close, this._styler); - return createBuilder(this, styler, this._isEmpty); - }; - } - }; -} - -const proto = Object.defineProperties(() => {}, { - ...styles, - level: { - enumerable: true, - get() { - return this._generator.level; - }, - set(level) { - this._generator.level = level; - } - } -}); - -const createStyler = (open, close, parent) => { - let openAll; - let closeAll; - if (parent === undefined) { - openAll = open; - closeAll = close; - } else { - openAll = parent.openAll + open; - closeAll = close + parent.closeAll; - } - - return { - open, - close, - openAll, - closeAll, - parent - }; -}; - -const createBuilder = (self, _styler, _isEmpty) => { - const builder = (...arguments_) => { - if (isArray(arguments_[0]) && isArray(arguments_[0].raw)) { - // Called as a template literal, for example: chalk.red`2 + 3 = {bold ${2+3}}` - return applyStyle(builder, chalkTag(builder, ...arguments_)); - } - - // Single argument is hot path, implicit coercion is faster than anything - // eslint-disable-next-line no-implicit-coercion - return applyStyle(builder, (arguments_.length === 1) ? ('' + arguments_[0]) : arguments_.join(' ')); - }; - - // We alter the prototype because we must return a function, but there is - // no way to create a function with a different prototype - Object.setPrototypeOf(builder, proto); - - builder._generator = self; - builder._styler = _styler; - builder._isEmpty = _isEmpty; - - return builder; -}; - -const applyStyle = (self, string) => { - if (self.level <= 0 || !string) { - return self._isEmpty ? '' : string; - } - - let styler = self._styler; - - if (styler === undefined) { - return string; - } - - const {openAll, closeAll} = styler; - if (string.indexOf('\u001B') !== -1) { - while (styler !== undefined) { - // Replace any instances already present with a re-opening code - // otherwise only the part of the string until said closing code - // will be colored, and the rest will simply be 'plain'. 
- string = stringReplaceAll(string, styler.close, styler.open); - - styler = styler.parent; - } - } - - // We can move both next actions out of loop, because remaining actions in loop won't have - // any/visible effect on parts we add here. Close the styling before a linebreak and reopen - // after next line to fix a bleed issue on macOS: https://github.com/chalk/chalk/pull/92 - const lfIndex = string.indexOf('\n'); - if (lfIndex !== -1) { - string = stringEncaseCRLFWithFirstIndex(string, closeAll, openAll, lfIndex); - } - - return openAll + string + closeAll; -}; - -let template; -const chalkTag = (chalk, ...strings) => { - const [firstString] = strings; - - if (!isArray(firstString) || !isArray(firstString.raw)) { - // If chalk() was called by itself or with a string, - // return the string itself as a string. - return strings.join(' '); - } - - const arguments_ = strings.slice(1); - const parts = [firstString.raw[0]]; - - for (let i = 1; i < firstString.length; i++) { - parts.push( - String(arguments_[i - 1]).replace(/[{}\\]/g, '\\$&'), - String(firstString.raw[i]) - ); - } - - if (template === undefined) { - template = __nccwpck_require__(20500); - } - - return template(chalk, parts.join('')); -}; - -Object.defineProperties(Chalk.prototype, styles); - -const chalk = Chalk(); // eslint-disable-line new-cap -chalk.supportsColor = stdoutColor; -chalk.stderr = Chalk({level: stderrColor ? stderrColor.level : 0}); // eslint-disable-line new-cap -chalk.stderr.supportsColor = stderrColor; - -module.exports = chalk; - - -/***/ }), - -/***/ 20500: -/***/ ((module) => { - -"use strict"; - -const TEMPLATE_REGEX = /(?:\\(u(?:[a-f\d]{4}|\{[a-f\d]{1,6}\})|x[a-f\d]{2}|.))|(?:\{(~)?(\w+(?:\([^)]*\))?(?:\.\w+(?:\([^)]*\))?)*)(?:[ \t]|(?=\r?\n)))|(\})|((?:.|[\r\n\f])+?)/gi; -const STYLE_REGEX = /(?:^|\.)(\w+)(?:\(([^)]*)\))?/g; -const STRING_REGEX = /^(['"])((?:\\.|(?!\1)[^\\])*)\1$/; -const ESCAPE_REGEX = /\\(u(?:[a-f\d]{4}|{[a-f\d]{1,6}})|x[a-f\d]{2}|.)|([^\\])/gi; - -const ESCAPES = new Map([ - ['n', '\n'], - ['r', '\r'], - ['t', '\t'], - ['b', '\b'], - ['f', '\f'], - ['v', '\v'], - ['0', '\0'], - ['\\', '\\'], - ['e', '\u001B'], - ['a', '\u0007'] -]); - -function unescape(c) { - const u = c[0] === 'u'; - const bracket = c[1] === '{'; - - if ((u && !bracket && c.length === 5) || (c[0] === 'x' && c.length === 3)) { - return String.fromCharCode(parseInt(c.slice(1), 16)); - } - - if (u && bracket) { - return String.fromCodePoint(parseInt(c.slice(2, -1), 16)); - } - - return ESCAPES.get(c) || c; -} - -function parseArguments(name, arguments_) { - const results = []; - const chunks = arguments_.trim().split(/\s*,\s*/g); - let matches; - - for (const chunk of chunks) { - const number = Number(chunk); - if (!Number.isNaN(number)) { - results.push(number); - } else if ((matches = chunk.match(STRING_REGEX))) { - results.push(matches[2].replace(ESCAPE_REGEX, (m, escape, character) => escape ? 
unescape(escape) : character)); - } else { - throw new Error(`Invalid Chalk template style argument: ${chunk} (in style '${name}')`); - } - } - - return results; -} - -function parseStyle(style) { - STYLE_REGEX.lastIndex = 0; - - const results = []; - let matches; - - while ((matches = STYLE_REGEX.exec(style)) !== null) { - const name = matches[1]; - - if (matches[2]) { - const args = parseArguments(name, matches[2]); - results.push([name].concat(args)); - } else { - results.push([name]); - } - } - - return results; -} - -function buildStyle(chalk, styles) { - const enabled = {}; - - for (const layer of styles) { - for (const style of layer.styles) { - enabled[style[0]] = layer.inverse ? null : style.slice(1); - } - } - - let current = chalk; - for (const [styleName, styles] of Object.entries(enabled)) { - if (!Array.isArray(styles)) { - continue; - } - - if (!(styleName in current)) { - throw new Error(`Unknown Chalk style: ${styleName}`); - } - - current = styles.length > 0 ? current[styleName](...styles) : current[styleName]; - } - - return current; -} - -module.exports = (chalk, temporary) => { - const styles = []; - const chunks = []; - let chunk = []; - - // eslint-disable-next-line max-params - temporary.replace(TEMPLATE_REGEX, (m, escapeCharacter, inverse, style, close, character) => { - if (escapeCharacter) { - chunk.push(unescape(escapeCharacter)); - } else if (style) { - const string = chunk.join(''); - chunk = []; - chunks.push(styles.length === 0 ? string : buildStyle(chalk, styles)(string)); - styles.push({inverse, styles: parseStyle(style)}); - } else if (close) { - if (styles.length === 0) { - throw new Error('Found extraneous } in Chalk template literal'); - } - - chunks.push(buildStyle(chalk, styles)(chunk.join(''))); - chunk = []; - styles.pop(); - } else { - chunk.push(character); - } - }); - - chunks.push(chunk.join('')); - - if (styles.length > 0) { - const errMessage = `Chalk template literal is missing ${styles.length} closing bracket${styles.length === 1 ? '' : 's'} (\`}\`)`; - throw new Error(errMessage); - } - - return chunks.join(''); -}; - - -/***/ }), - -/***/ 82415: -/***/ ((module) => { - -"use strict"; - - -const stringReplaceAll = (string, substring, replacer) => { - let index = string.indexOf(substring); - if (index === -1) { - return string; - } - - const substringLength = substring.length; - let endIndex = 0; - let returnValue = ''; - do { - returnValue += string.substr(endIndex, index - endIndex) + substring + replacer; - endIndex = index + substringLength; - index = string.indexOf(substring, endIndex); - } while (index !== -1); - - returnValue += string.substr(endIndex); - return returnValue; -}; - -const stringEncaseCRLFWithFirstIndex = (string, prefix, postfix, index) => { - let endIndex = 0; - let returnValue = ''; - do { - const gotCR = string[index - 1] === '\r'; - returnValue += string.substr(endIndex, (gotCR ? index - 1 : index) - endIndex) + prefix + (gotCR ? '\r\n' : '\n') + postfix; - endIndex = index + 1; - index = string.indexOf('\n', endIndex); - } while (index !== -1); - - returnValue += string.substr(endIndex); - return returnValue; -}; - -module.exports = { - stringReplaceAll, - stringEncaseCRLFWithFirstIndex -}; - - -/***/ }), - -/***/ 97074: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.supportsLanguage = exports.listLanguages = exports.highlight = void 0; -var hljs = __importStar(__nccwpck_require__(60082)); -var parse5 = __importStar(__nccwpck_require__(88933)); -var parse5_htmlparser2_tree_adapter_1 = __importDefault(__nccwpck_require__(9759)); -var theme_1 = __nccwpck_require__(81442); -function colorizeNode(node, theme, context) { - if (theme === void 0) { theme = {}; } - switch (node.type) { - case 'text': { - var text = node.data; - if (context === undefined) { - return (theme.default || theme_1.DEFAULT_THEME.default || theme_1.plain)(text); - } - return text; - } - case 'tag': { - var hljsClass = /hljs-(\w+)/.exec(node.attribs.class); - if (hljsClass) { - var token_1 = hljsClass[1]; - var nodeData = node.childNodes - .map(function (node) { return colorizeNode(node, theme, token_1); }) - .join(''); - return (theme[token_1] || theme_1.DEFAULT_THEME[token_1] || theme_1.plain)(nodeData); - } - // Return the data itself when the class name isn't prefixed with a highlight.js token prefix. - // This is common in instances of sublanguages (JSX, Markdown Code Blocks, etc.) - return node.childNodes.map(function (node) { return colorizeNode(node, theme); }).join(''); - } - } - throw new Error('Invalid node type ' + node.type); -} -function colorize(code, theme) { - if (theme === void 0) { theme = {}; } - var fragment = parse5.parseFragment(code, { - treeAdapter: parse5_htmlparser2_tree_adapter_1.default, - }); - return fragment.childNodes.map(function (node) { return colorizeNode(node, theme); }).join(''); -} -/** - * Apply syntax highlighting to `code` with ASCII color codes. The language is automatically - * detected if not set. 
- * - * ```ts - * import {highlight} from 'cli-highlight'; - * import * as fs from 'fs'; - * - * fs.readFile('package.json', 'utf8', (err: any, json: string) => { - * console.log('package.json:'); - * console.log(highlight(json)); - * }); - * ``` - * - * @param code The code to highlight - * @param options Optional options - */ -function highlight(code, options) { - if (options === void 0) { options = {}; } - var html; - if (options.language) { - html = hljs.highlight(options.language, code, options.ignoreIllegals, options.continuation).value; - } - else { - html = hljs.highlightAuto(code, options.languageSubset).value; - } - return colorize(html, options.theme); -} -exports.highlight = highlight; -/** - * Returns all supported languages - */ -function listLanguages() { - return hljs.listLanguages(); -} -exports.listLanguages = listLanguages; -/** - * Returns true if the language is supported - * @param name A language name, alias or file extension - */ -function supportsLanguage(name) { - return !!hljs.getLanguage(name); -} -exports.supportsLanguage = supportsLanguage; -exports.default = highlight; -__exportStar(__nccwpck_require__(81442), exports); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 81442: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parse = exports.stringify = exports.toJson = exports.fromJson = exports.DEFAULT_THEME = exports.plain = void 0; -var chalk_1 = __importDefault(__nccwpck_require__(78818)); -/** - * Identity function for tokens that should not be styled (returns the input string as-is). - * See [[Theme]] for an example. - */ -var plain = function (codePart) { return codePart; }; -exports.plain = plain; -/** - * The default theme. It is possible to override just individual keys. - */ -exports.DEFAULT_THEME = { - /** - * keyword in a regular Algol-style language - */ - keyword: chalk_1.default.blue, - /** - * built-in or library object (constant, class, function) - */ - built_in: chalk_1.default.cyan, - /** - * user-defined type in a language with first-class syntactically significant types, like - * Haskell - */ - type: chalk_1.default.cyan.dim, - /** - * special identifier for a built-in value ("true", "false", "null") - */ - literal: chalk_1.default.blue, - /** - * number, including units and modifiers, if any. 
- */ - number: chalk_1.default.green, - /** - * literal regular expression - */ - regexp: chalk_1.default.red, - /** - * literal string, character - */ - string: chalk_1.default.red, - /** - * parsed section inside a literal string - */ - subst: exports.plain, - /** - * symbolic constant, interned string, goto label - */ - symbol: exports.plain, - /** - * class or class-level declaration (interfaces, traits, modules, etc) - */ - class: chalk_1.default.blue, - /** - * function or method declaration - */ - function: chalk_1.default.yellow, - /** - * name of a class or a function at the place of declaration - */ - title: exports.plain, - /** - * block of function arguments (parameters) at the place of declaration - */ - params: exports.plain, - /** - * comment - */ - comment: chalk_1.default.green, - /** - * documentation markup within comments - */ - doctag: chalk_1.default.green, - /** - * flags, modifiers, annotations, processing instructions, preprocessor directive, etc - */ - meta: chalk_1.default.grey, - /** - * keyword or built-in within meta construct - */ - 'meta-keyword': exports.plain, - /** - * string within meta construct - */ - 'meta-string': exports.plain, - /** - * heading of a section in a config file, heading in text markup - */ - section: exports.plain, - /** - * XML/HTML tag - */ - tag: chalk_1.default.grey, - /** - * name of an XML tag, the first word in an s-expression - */ - name: chalk_1.default.blue, - /** - * s-expression name from the language standard library - */ - 'builtin-name': exports.plain, - /** - * name of an attribute with no language defined semantics (keys in JSON, setting names in - * .ini), also sub-attribute within another highlighted object, like XML tag - */ - attr: chalk_1.default.cyan, - /** - * name of an attribute followed by a structured value part, like CSS properties - */ - attribute: exports.plain, - /** - * variable in a config or a template file, environment var expansion in a script - */ - variable: exports.plain, - /** - * list item bullet in text markup - */ - bullet: exports.plain, - /** - * code block in text markup - */ - code: exports.plain, - /** - * emphasis in text markup - */ - emphasis: chalk_1.default.italic, - /** - * strong emphasis in text markup - */ - strong: chalk_1.default.bold, - /** - * mathematical formula in text markup - */ - formula: exports.plain, - /** - * hyperlink in text markup - */ - link: chalk_1.default.underline, - /** - * quotation in text markup - */ - quote: exports.plain, - /** - * tag selector in CSS - */ - 'selector-tag': exports.plain, - /** - * #id selector in CSS - */ - 'selector-id': exports.plain, - /** - * .class selector in CSS - */ - 'selector-class': exports.plain, - /** - * [attr] selector in CSS - */ - 'selector-attr': exports.plain, - /** - * :pseudo selector in CSS - */ - 'selector-pseudo': exports.plain, - /** - * tag of a template language - */ - 'template-tag': exports.plain, - /** - * variable in a template language - */ - 'template-variable': exports.plain, - /** - * added or changed line in a diff - */ - addition: chalk_1.default.green, - /** - * deleted line in a diff - */ - deletion: chalk_1.default.red, - /** - * things not matched by any token - */ - default: exports.plain, -}; -/** - * Converts a [[JsonTheme]] with string values to a [[Theme]] with formatter functions. Used by [[parse]]. 
- */ -function fromJson(json) { - var theme = {}; - for (var _i = 0, _a = Object.keys(json); _i < _a.length; _i++) { - var key = _a[_i]; - var style = json[key]; - if (Array.isArray(style)) { - ; - theme[key] = style.reduce(function (previous, current) { return (current === 'plain' ? exports.plain : previous[current]); }, chalk_1.default); - } - else { - ; - theme[key] = chalk_1.default[style]; - } - } - return theme; -} -exports.fromJson = fromJson; -/** - * Converts a [[Theme]] with formatter functions to a [[JsonTheme]] with string values. Used by [[stringify]]. - */ -function toJson(theme) { - var jsonTheme = {}; - for (var _i = 0, _a = Object.keys(jsonTheme); _i < _a.length; _i++) { - var key = _a[_i]; - var style = jsonTheme[key]; - jsonTheme[key] = style._styles; - } - return jsonTheme; -} -exports.toJson = toJson; -/** - * Stringifies a [[Theme]] with formatter functions to a JSON string. - * - * ```ts - * import chalk = require('chalk'); - * import {stringify} from 'cli-highlight'; - * import * as fs from 'fs'; - * - * const myTheme: Theme = { - * keyword: chalk.red.bold, - * addition: chalk.green, - * deletion: chalk.red.strikethrough, - * number: plain - * } - * const json = stringify(myTheme); - * fs.writeFile('mytheme.json', json, (err: any) => { - * if (err) throw err; - * console.log('Theme saved'); - * }); - * ``` - */ -function stringify(theme) { - return JSON.stringify(toJson(theme)); -} -exports.stringify = stringify; -/** - * Parses a JSON string into a [[Theme]] with formatter functions. - * - * ```ts - * import * as fs from 'fs'; - * import {parse, highlight} from 'cli-highlight'; - * - * fs.readFile('mytheme.json', 'utf8', (err: any, json: string) => { - * if (err) throw err; - * const code = highlight('SELECT * FROM table', {theme: parse(json)}); - * console.log(code); - * }); - * ``` - */ -function parse(json) { - return fromJson(JSON.parse(json)); -} -exports.parse = parse; -//# sourceMappingURL=theme.js.map - -/***/ }), - -/***/ 94281: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -const { DOCUMENT_MODE } = __nccwpck_require__(49578); - -//Const -const VALID_DOCTYPE_NAME = 'html'; -const VALID_SYSTEM_ID = 'about:legacy-compat'; -const QUIRKS_MODE_SYSTEM_ID = 'http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd'; - -const QUIRKS_MODE_PUBLIC_ID_PREFIXES = [ - '+//silmaril//dtd html pro v0r11 19970101//', - '-//as//dtd html 3.0 aswedit + extensions//', - '-//advasoft ltd//dtd html 3.0 aswedit + extensions//', - '-//ietf//dtd html 2.0 level 1//', - '-//ietf//dtd html 2.0 level 2//', - '-//ietf//dtd html 2.0 strict level 1//', - '-//ietf//dtd html 2.0 strict level 2//', - '-//ietf//dtd html 2.0 strict//', - '-//ietf//dtd html 2.0//', - '-//ietf//dtd html 2.1e//', - '-//ietf//dtd html 3.0//', - '-//ietf//dtd html 3.2 final//', - '-//ietf//dtd html 3.2//', - '-//ietf//dtd html 3//', - '-//ietf//dtd html level 0//', - '-//ietf//dtd html level 1//', - '-//ietf//dtd html level 2//', - '-//ietf//dtd html level 3//', - '-//ietf//dtd html strict level 0//', - '-//ietf//dtd html strict level 1//', - '-//ietf//dtd html strict level 2//', - '-//ietf//dtd html strict level 3//', - '-//ietf//dtd html strict//', - '-//ietf//dtd html//', - '-//metrius//dtd metrius presentational//', - '-//microsoft//dtd internet explorer 2.0 html strict//', - '-//microsoft//dtd internet explorer 2.0 html//', - '-//microsoft//dtd internet explorer 2.0 tables//', - '-//microsoft//dtd internet explorer 3.0 html strict//', - '-//microsoft//dtd internet 
explorer 3.0 html//', - '-//microsoft//dtd internet explorer 3.0 tables//', - '-//netscape comm. corp.//dtd html//', - '-//netscape comm. corp.//dtd strict html//', - "-//o'reilly and associates//dtd html 2.0//", - "-//o'reilly and associates//dtd html extended 1.0//", - "-//o'reilly and associates//dtd html extended relaxed 1.0//", - '-//sq//dtd html 2.0 hotmetal + extensions//', - '-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//', - '-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//', - '-//spyglass//dtd html 2.0 extended//', - '-//sun microsystems corp.//dtd hotjava html//', - '-//sun microsystems corp.//dtd hotjava strict html//', - '-//w3c//dtd html 3 1995-03-24//', - '-//w3c//dtd html 3.2 draft//', - '-//w3c//dtd html 3.2 final//', - '-//w3c//dtd html 3.2//', - '-//w3c//dtd html 3.2s draft//', - '-//w3c//dtd html 4.0 frameset//', - '-//w3c//dtd html 4.0 transitional//', - '-//w3c//dtd html experimental 19960712//', - '-//w3c//dtd html experimental 970421//', - '-//w3c//dtd w3 html//', - '-//w3o//dtd w3 html 3.0//', - '-//webtechs//dtd mozilla html 2.0//', - '-//webtechs//dtd mozilla html//' -]; - -const QUIRKS_MODE_NO_SYSTEM_ID_PUBLIC_ID_PREFIXES = QUIRKS_MODE_PUBLIC_ID_PREFIXES.concat([ - '-//w3c//dtd html 4.01 frameset//', - '-//w3c//dtd html 4.01 transitional//' -]); - -const QUIRKS_MODE_PUBLIC_IDS = ['-//w3o//dtd w3 html strict 3.0//en//', '-/w3c/dtd html 4.0 transitional/en', 'html']; -const LIMITED_QUIRKS_PUBLIC_ID_PREFIXES = ['-//w3c//dtd xhtml 1.0 frameset//', '-//w3c//dtd xhtml 1.0 transitional//']; - -const LIMITED_QUIRKS_WITH_SYSTEM_ID_PUBLIC_ID_PREFIXES = LIMITED_QUIRKS_PUBLIC_ID_PREFIXES.concat([ - '-//w3c//dtd html 4.01 frameset//', - '-//w3c//dtd html 4.01 transitional//' -]); - -//Utils -function enquoteDoctypeId(id) { - const quote = id.indexOf('"') !== -1 ? "'" : '"'; - - return quote + id + quote; -} - -function hasPrefix(publicId, prefixes) { - for (let i = 0; i < prefixes.length; i++) { - if (publicId.indexOf(prefixes[i]) === 0) { - return true; - } - } - - return false; -} - -//API -exports.isConforming = function(token) { - return ( - token.name === VALID_DOCTYPE_NAME && - token.publicId === null && - (token.systemId === null || token.systemId === VALID_SYSTEM_ID) - ); -}; - -exports.getDocumentMode = function(token) { - if (token.name !== VALID_DOCTYPE_NAME) { - return DOCUMENT_MODE.QUIRKS; - } - - const systemId = token.systemId; - - if (systemId && systemId.toLowerCase() === QUIRKS_MODE_SYSTEM_ID) { - return DOCUMENT_MODE.QUIRKS; - } - - let publicId = token.publicId; - - if (publicId !== null) { - publicId = publicId.toLowerCase(); - - if (QUIRKS_MODE_PUBLIC_IDS.indexOf(publicId) > -1) { - return DOCUMENT_MODE.QUIRKS; - } - - let prefixes = systemId === null ? QUIRKS_MODE_NO_SYSTEM_ID_PUBLIC_ID_PREFIXES : QUIRKS_MODE_PUBLIC_ID_PREFIXES; - - if (hasPrefix(publicId, prefixes)) { - return DOCUMENT_MODE.QUIRKS; - } - - prefixes = - systemId === null ? 
LIMITED_QUIRKS_PUBLIC_ID_PREFIXES : LIMITED_QUIRKS_WITH_SYSTEM_ID_PUBLIC_ID_PREFIXES; - - if (hasPrefix(publicId, prefixes)) { - return DOCUMENT_MODE.LIMITED_QUIRKS; - } - } - - return DOCUMENT_MODE.NO_QUIRKS; -}; - -exports.serializeContent = function(name, publicId, systemId) { - let str = '!DOCTYPE '; - - if (name) { - str += name; - } - - if (publicId) { - str += ' PUBLIC ' + enquoteDoctypeId(publicId); - } else if (systemId) { - str += ' SYSTEM'; - } - - if (systemId !== null) { - str += ' ' + enquoteDoctypeId(systemId); - } - - return str; -}; - - -/***/ }), - -/***/ 65510: -/***/ ((module) => { - -"use strict"; - - -module.exports = { - controlCharacterInInputStream: 'control-character-in-input-stream', - noncharacterInInputStream: 'noncharacter-in-input-stream', - surrogateInInputStream: 'surrogate-in-input-stream', - nonVoidHtmlElementStartTagWithTrailingSolidus: 'non-void-html-element-start-tag-with-trailing-solidus', - endTagWithAttributes: 'end-tag-with-attributes', - endTagWithTrailingSolidus: 'end-tag-with-trailing-solidus', - unexpectedSolidusInTag: 'unexpected-solidus-in-tag', - unexpectedNullCharacter: 'unexpected-null-character', - unexpectedQuestionMarkInsteadOfTagName: 'unexpected-question-mark-instead-of-tag-name', - invalidFirstCharacterOfTagName: 'invalid-first-character-of-tag-name', - unexpectedEqualsSignBeforeAttributeName: 'unexpected-equals-sign-before-attribute-name', - missingEndTagName: 'missing-end-tag-name', - unexpectedCharacterInAttributeName: 'unexpected-character-in-attribute-name', - unknownNamedCharacterReference: 'unknown-named-character-reference', - missingSemicolonAfterCharacterReference: 'missing-semicolon-after-character-reference', - unexpectedCharacterAfterDoctypeSystemIdentifier: 'unexpected-character-after-doctype-system-identifier', - unexpectedCharacterInUnquotedAttributeValue: 'unexpected-character-in-unquoted-attribute-value', - eofBeforeTagName: 'eof-before-tag-name', - eofInTag: 'eof-in-tag', - missingAttributeValue: 'missing-attribute-value', - missingWhitespaceBetweenAttributes: 'missing-whitespace-between-attributes', - missingWhitespaceAfterDoctypePublicKeyword: 'missing-whitespace-after-doctype-public-keyword', - missingWhitespaceBetweenDoctypePublicAndSystemIdentifiers: - 'missing-whitespace-between-doctype-public-and-system-identifiers', - missingWhitespaceAfterDoctypeSystemKeyword: 'missing-whitespace-after-doctype-system-keyword', - missingQuoteBeforeDoctypePublicIdentifier: 'missing-quote-before-doctype-public-identifier', - missingQuoteBeforeDoctypeSystemIdentifier: 'missing-quote-before-doctype-system-identifier', - missingDoctypePublicIdentifier: 'missing-doctype-public-identifier', - missingDoctypeSystemIdentifier: 'missing-doctype-system-identifier', - abruptDoctypePublicIdentifier: 'abrupt-doctype-public-identifier', - abruptDoctypeSystemIdentifier: 'abrupt-doctype-system-identifier', - cdataInHtmlContent: 'cdata-in-html-content', - incorrectlyOpenedComment: 'incorrectly-opened-comment', - eofInScriptHtmlCommentLikeText: 'eof-in-script-html-comment-like-text', - eofInDoctype: 'eof-in-doctype', - nestedComment: 'nested-comment', - abruptClosingOfEmptyComment: 'abrupt-closing-of-empty-comment', - eofInComment: 'eof-in-comment', - incorrectlyClosedComment: 'incorrectly-closed-comment', - eofInCdata: 'eof-in-cdata', - absenceOfDigitsInNumericCharacterReference: 'absence-of-digits-in-numeric-character-reference', - nullCharacterReference: 'null-character-reference', - surrogateCharacterReference: 
'surrogate-character-reference', - characterReferenceOutsideUnicodeRange: 'character-reference-outside-unicode-range', - controlCharacterReference: 'control-character-reference', - noncharacterCharacterReference: 'noncharacter-character-reference', - missingWhitespaceBeforeDoctypeName: 'missing-whitespace-before-doctype-name', - missingDoctypeName: 'missing-doctype-name', - invalidCharacterSequenceAfterDoctypeName: 'invalid-character-sequence-after-doctype-name', - duplicateAttribute: 'duplicate-attribute', - nonConformingDoctype: 'non-conforming-doctype', - missingDoctype: 'missing-doctype', - misplacedDoctype: 'misplaced-doctype', - endTagWithoutMatchingOpenElement: 'end-tag-without-matching-open-element', - closingOfElementWithOpenChildElements: 'closing-of-element-with-open-child-elements', - disallowedContentInNoscriptInHead: 'disallowed-content-in-noscript-in-head', - openElementsLeftAfterEof: 'open-elements-left-after-eof', - abandonedHeadElementChild: 'abandoned-head-element-child', - misplacedStartTagForHeadElement: 'misplaced-start-tag-for-head-element', - nestedNoscriptInHead: 'nested-noscript-in-head', - eofInElementThatCanContainOnlyText: 'eof-in-element-that-can-contain-only-text' -}; - - -/***/ }), - -/***/ 24320: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -const Tokenizer = __nccwpck_require__(66480); -const HTML = __nccwpck_require__(49578); - -//Aliases -const $ = HTML.TAG_NAMES; -const NS = HTML.NAMESPACES; -const ATTRS = HTML.ATTRS; - -//MIME types -const MIME_TYPES = { - TEXT_HTML: 'text/html', - APPLICATION_XML: 'application/xhtml+xml' -}; - -//Attributes -const DEFINITION_URL_ATTR = 'definitionurl'; -const ADJUSTED_DEFINITION_URL_ATTR = 'definitionURL'; -const SVG_ATTRS_ADJUSTMENT_MAP = { - attributename: 'attributeName', - attributetype: 'attributeType', - basefrequency: 'baseFrequency', - baseprofile: 'baseProfile', - calcmode: 'calcMode', - clippathunits: 'clipPathUnits', - diffuseconstant: 'diffuseConstant', - edgemode: 'edgeMode', - filterunits: 'filterUnits', - glyphref: 'glyphRef', - gradienttransform: 'gradientTransform', - gradientunits: 'gradientUnits', - kernelmatrix: 'kernelMatrix', - kernelunitlength: 'kernelUnitLength', - keypoints: 'keyPoints', - keysplines: 'keySplines', - keytimes: 'keyTimes', - lengthadjust: 'lengthAdjust', - limitingconeangle: 'limitingConeAngle', - markerheight: 'markerHeight', - markerunits: 'markerUnits', - markerwidth: 'markerWidth', - maskcontentunits: 'maskContentUnits', - maskunits: 'maskUnits', - numoctaves: 'numOctaves', - pathlength: 'pathLength', - patterncontentunits: 'patternContentUnits', - patterntransform: 'patternTransform', - patternunits: 'patternUnits', - pointsatx: 'pointsAtX', - pointsaty: 'pointsAtY', - pointsatz: 'pointsAtZ', - preservealpha: 'preserveAlpha', - preserveaspectratio: 'preserveAspectRatio', - primitiveunits: 'primitiveUnits', - refx: 'refX', - refy: 'refY', - repeatcount: 'repeatCount', - repeatdur: 'repeatDur', - requiredextensions: 'requiredExtensions', - requiredfeatures: 'requiredFeatures', - specularconstant: 'specularConstant', - specularexponent: 'specularExponent', - spreadmethod: 'spreadMethod', - startoffset: 'startOffset', - stddeviation: 'stdDeviation', - stitchtiles: 'stitchTiles', - surfacescale: 'surfaceScale', - systemlanguage: 'systemLanguage', - tablevalues: 'tableValues', - targetx: 'targetX', - targety: 'targetY', - textlength: 'textLength', - viewbox: 'viewBox', - viewtarget: 'viewTarget', - xchannelselector: 
'xChannelSelector', - ychannelselector: 'yChannelSelector', - zoomandpan: 'zoomAndPan' -}; - -const XML_ATTRS_ADJUSTMENT_MAP = { - 'xlink:actuate': { prefix: 'xlink', name: 'actuate', namespace: NS.XLINK }, - 'xlink:arcrole': { prefix: 'xlink', name: 'arcrole', namespace: NS.XLINK }, - 'xlink:href': { prefix: 'xlink', name: 'href', namespace: NS.XLINK }, - 'xlink:role': { prefix: 'xlink', name: 'role', namespace: NS.XLINK }, - 'xlink:show': { prefix: 'xlink', name: 'show', namespace: NS.XLINK }, - 'xlink:title': { prefix: 'xlink', name: 'title', namespace: NS.XLINK }, - 'xlink:type': { prefix: 'xlink', name: 'type', namespace: NS.XLINK }, - 'xml:base': { prefix: 'xml', name: 'base', namespace: NS.XML }, - 'xml:lang': { prefix: 'xml', name: 'lang', namespace: NS.XML }, - 'xml:space': { prefix: 'xml', name: 'space', namespace: NS.XML }, - xmlns: { prefix: '', name: 'xmlns', namespace: NS.XMLNS }, - 'xmlns:xlink': { prefix: 'xmlns', name: 'xlink', namespace: NS.XMLNS } -}; - -//SVG tag names adjustment map -const SVG_TAG_NAMES_ADJUSTMENT_MAP = (exports.SVG_TAG_NAMES_ADJUSTMENT_MAP = { - altglyph: 'altGlyph', - altglyphdef: 'altGlyphDef', - altglyphitem: 'altGlyphItem', - animatecolor: 'animateColor', - animatemotion: 'animateMotion', - animatetransform: 'animateTransform', - clippath: 'clipPath', - feblend: 'feBlend', - fecolormatrix: 'feColorMatrix', - fecomponenttransfer: 'feComponentTransfer', - fecomposite: 'feComposite', - feconvolvematrix: 'feConvolveMatrix', - fediffuselighting: 'feDiffuseLighting', - fedisplacementmap: 'feDisplacementMap', - fedistantlight: 'feDistantLight', - feflood: 'feFlood', - fefunca: 'feFuncA', - fefuncb: 'feFuncB', - fefuncg: 'feFuncG', - fefuncr: 'feFuncR', - fegaussianblur: 'feGaussianBlur', - feimage: 'feImage', - femerge: 'feMerge', - femergenode: 'feMergeNode', - femorphology: 'feMorphology', - feoffset: 'feOffset', - fepointlight: 'fePointLight', - fespecularlighting: 'feSpecularLighting', - fespotlight: 'feSpotLight', - fetile: 'feTile', - feturbulence: 'feTurbulence', - foreignobject: 'foreignObject', - glyphref: 'glyphRef', - lineargradient: 'linearGradient', - radialgradient: 'radialGradient', - textpath: 'textPath' -}); - -//Tags that causes exit from foreign content -const EXITS_FOREIGN_CONTENT = { - [$.B]: true, - [$.BIG]: true, - [$.BLOCKQUOTE]: true, - [$.BODY]: true, - [$.BR]: true, - [$.CENTER]: true, - [$.CODE]: true, - [$.DD]: true, - [$.DIV]: true, - [$.DL]: true, - [$.DT]: true, - [$.EM]: true, - [$.EMBED]: true, - [$.H1]: true, - [$.H2]: true, - [$.H3]: true, - [$.H4]: true, - [$.H5]: true, - [$.H6]: true, - [$.HEAD]: true, - [$.HR]: true, - [$.I]: true, - [$.IMG]: true, - [$.LI]: true, - [$.LISTING]: true, - [$.MENU]: true, - [$.META]: true, - [$.NOBR]: true, - [$.OL]: true, - [$.P]: true, - [$.PRE]: true, - [$.RUBY]: true, - [$.S]: true, - [$.SMALL]: true, - [$.SPAN]: true, - [$.STRONG]: true, - [$.STRIKE]: true, - [$.SUB]: true, - [$.SUP]: true, - [$.TABLE]: true, - [$.TT]: true, - [$.U]: true, - [$.UL]: true, - [$.VAR]: true -}; - -//Check exit from foreign content -exports.causesExit = function(startTagToken) { - const tn = startTagToken.tagName; - const isFontWithAttrs = - tn === $.FONT && - (Tokenizer.getTokenAttr(startTagToken, ATTRS.COLOR) !== null || - Tokenizer.getTokenAttr(startTagToken, ATTRS.SIZE) !== null || - Tokenizer.getTokenAttr(startTagToken, ATTRS.FACE) !== null); - - return isFontWithAttrs ? 
true : EXITS_FOREIGN_CONTENT[tn]; -}; - -//Token adjustments -exports.adjustTokenMathMLAttrs = function(token) { - for (let i = 0; i < token.attrs.length; i++) { - if (token.attrs[i].name === DEFINITION_URL_ATTR) { - token.attrs[i].name = ADJUSTED_DEFINITION_URL_ATTR; - break; - } - } -}; - -exports.adjustTokenSVGAttrs = function(token) { - for (let i = 0; i < token.attrs.length; i++) { - const adjustedAttrName = SVG_ATTRS_ADJUSTMENT_MAP[token.attrs[i].name]; - - if (adjustedAttrName) { - token.attrs[i].name = adjustedAttrName; - } - } -}; - -exports.adjustTokenXMLAttrs = function(token) { - for (let i = 0; i < token.attrs.length; i++) { - const adjustedAttrEntry = XML_ATTRS_ADJUSTMENT_MAP[token.attrs[i].name]; - - if (adjustedAttrEntry) { - token.attrs[i].prefix = adjustedAttrEntry.prefix; - token.attrs[i].name = adjustedAttrEntry.name; - token.attrs[i].namespace = adjustedAttrEntry.namespace; - } - } -}; - -exports.adjustTokenSVGTagName = function(token) { - const adjustedTagName = SVG_TAG_NAMES_ADJUSTMENT_MAP[token.tagName]; - - if (adjustedTagName) { - token.tagName = adjustedTagName; - } -}; - -//Integration points -function isMathMLTextIntegrationPoint(tn, ns) { - return ns === NS.MATHML && (tn === $.MI || tn === $.MO || tn === $.MN || tn === $.MS || tn === $.MTEXT); -} - -function isHtmlIntegrationPoint(tn, ns, attrs) { - if (ns === NS.MATHML && tn === $.ANNOTATION_XML) { - for (let i = 0; i < attrs.length; i++) { - if (attrs[i].name === ATTRS.ENCODING) { - const value = attrs[i].value.toLowerCase(); - - return value === MIME_TYPES.TEXT_HTML || value === MIME_TYPES.APPLICATION_XML; - } - } - } - - return ns === NS.SVG && (tn === $.FOREIGN_OBJECT || tn === $.DESC || tn === $.TITLE); -} - -exports.isIntegrationPoint = function(tn, ns, attrs, foreignNS) { - if ((!foreignNS || foreignNS === NS.HTML) && isHtmlIntegrationPoint(tn, ns, attrs)) { - return true; - } - - if ((!foreignNS || foreignNS === NS.MATHML) && isMathMLTextIntegrationPoint(tn, ns)) { - return true; - } - - return false; -}; - - -/***/ }), - -/***/ 49578: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const NS = (exports.NAMESPACES = { - HTML: 'http://www.w3.org/1999/xhtml', - MATHML: 'http://www.w3.org/1998/Math/MathML', - SVG: 'http://www.w3.org/2000/svg', - XLINK: 'http://www.w3.org/1999/xlink', - XML: 'http://www.w3.org/XML/1998/namespace', - XMLNS: 'http://www.w3.org/2000/xmlns/' -}); - -exports.ATTRS = { - TYPE: 'type', - ACTION: 'action', - ENCODING: 'encoding', - PROMPT: 'prompt', - NAME: 'name', - COLOR: 'color', - FACE: 'face', - SIZE: 'size' -}; - -exports.DOCUMENT_MODE = { - NO_QUIRKS: 'no-quirks', - QUIRKS: 'quirks', - LIMITED_QUIRKS: 'limited-quirks' -}; - -const $ = (exports.TAG_NAMES = { - A: 'a', - ADDRESS: 'address', - ANNOTATION_XML: 'annotation-xml', - APPLET: 'applet', - AREA: 'area', - ARTICLE: 'article', - ASIDE: 'aside', - - B: 'b', - BASE: 'base', - BASEFONT: 'basefont', - BGSOUND: 'bgsound', - BIG: 'big', - BLOCKQUOTE: 'blockquote', - BODY: 'body', - BR: 'br', - BUTTON: 'button', - - CAPTION: 'caption', - CENTER: 'center', - CODE: 'code', - COL: 'col', - COLGROUP: 'colgroup', - - DD: 'dd', - DESC: 'desc', - DETAILS: 'details', - DIALOG: 'dialog', - DIR: 'dir', - DIV: 'div', - DL: 'dl', - DT: 'dt', - - EM: 'em', - EMBED: 'embed', - - FIELDSET: 'fieldset', - FIGCAPTION: 'figcaption', - FIGURE: 'figure', - FONT: 'font', - FOOTER: 'footer', - FOREIGN_OBJECT: 'foreignObject', - FORM: 'form', - FRAME: 'frame', - FRAMESET: 'frameset', - - H1: 'h1', - H2: 'h2', - H3: 'h3', - H4: 
'h4', - H5: 'h5', - H6: 'h6', - HEAD: 'head', - HEADER: 'header', - HGROUP: 'hgroup', - HR: 'hr', - HTML: 'html', - - I: 'i', - IMG: 'img', - IMAGE: 'image', - INPUT: 'input', - IFRAME: 'iframe', - - KEYGEN: 'keygen', - - LABEL: 'label', - LI: 'li', - LINK: 'link', - LISTING: 'listing', - - MAIN: 'main', - MALIGNMARK: 'malignmark', - MARQUEE: 'marquee', - MATH: 'math', - MENU: 'menu', - META: 'meta', - MGLYPH: 'mglyph', - MI: 'mi', - MO: 'mo', - MN: 'mn', - MS: 'ms', - MTEXT: 'mtext', - - NAV: 'nav', - NOBR: 'nobr', - NOFRAMES: 'noframes', - NOEMBED: 'noembed', - NOSCRIPT: 'noscript', - - OBJECT: 'object', - OL: 'ol', - OPTGROUP: 'optgroup', - OPTION: 'option', - - P: 'p', - PARAM: 'param', - PLAINTEXT: 'plaintext', - PRE: 'pre', - - RB: 'rb', - RP: 'rp', - RT: 'rt', - RTC: 'rtc', - RUBY: 'ruby', - - S: 's', - SCRIPT: 'script', - SECTION: 'section', - SELECT: 'select', - SOURCE: 'source', - SMALL: 'small', - SPAN: 'span', - STRIKE: 'strike', - STRONG: 'strong', - STYLE: 'style', - SUB: 'sub', - SUMMARY: 'summary', - SUP: 'sup', - - TABLE: 'table', - TBODY: 'tbody', - TEMPLATE: 'template', - TEXTAREA: 'textarea', - TFOOT: 'tfoot', - TD: 'td', - TH: 'th', - THEAD: 'thead', - TITLE: 'title', - TR: 'tr', - TRACK: 'track', - TT: 'tt', - - U: 'u', - UL: 'ul', - - SVG: 'svg', - - VAR: 'var', - - WBR: 'wbr', - - XMP: 'xmp' -}); - -exports.SPECIAL_ELEMENTS = { - [NS.HTML]: { - [$.ADDRESS]: true, - [$.APPLET]: true, - [$.AREA]: true, - [$.ARTICLE]: true, - [$.ASIDE]: true, - [$.BASE]: true, - [$.BASEFONT]: true, - [$.BGSOUND]: true, - [$.BLOCKQUOTE]: true, - [$.BODY]: true, - [$.BR]: true, - [$.BUTTON]: true, - [$.CAPTION]: true, - [$.CENTER]: true, - [$.COL]: true, - [$.COLGROUP]: true, - [$.DD]: true, - [$.DETAILS]: true, - [$.DIR]: true, - [$.DIV]: true, - [$.DL]: true, - [$.DT]: true, - [$.EMBED]: true, - [$.FIELDSET]: true, - [$.FIGCAPTION]: true, - [$.FIGURE]: true, - [$.FOOTER]: true, - [$.FORM]: true, - [$.FRAME]: true, - [$.FRAMESET]: true, - [$.H1]: true, - [$.H2]: true, - [$.H3]: true, - [$.H4]: true, - [$.H5]: true, - [$.H6]: true, - [$.HEAD]: true, - [$.HEADER]: true, - [$.HGROUP]: true, - [$.HR]: true, - [$.HTML]: true, - [$.IFRAME]: true, - [$.IMG]: true, - [$.INPUT]: true, - [$.LI]: true, - [$.LINK]: true, - [$.LISTING]: true, - [$.MAIN]: true, - [$.MARQUEE]: true, - [$.MENU]: true, - [$.META]: true, - [$.NAV]: true, - [$.NOEMBED]: true, - [$.NOFRAMES]: true, - [$.NOSCRIPT]: true, - [$.OBJECT]: true, - [$.OL]: true, - [$.P]: true, - [$.PARAM]: true, - [$.PLAINTEXT]: true, - [$.PRE]: true, - [$.SCRIPT]: true, - [$.SECTION]: true, - [$.SELECT]: true, - [$.SOURCE]: true, - [$.STYLE]: true, - [$.SUMMARY]: true, - [$.TABLE]: true, - [$.TBODY]: true, - [$.TD]: true, - [$.TEMPLATE]: true, - [$.TEXTAREA]: true, - [$.TFOOT]: true, - [$.TH]: true, - [$.THEAD]: true, - [$.TITLE]: true, - [$.TR]: true, - [$.TRACK]: true, - [$.UL]: true, - [$.WBR]: true, - [$.XMP]: true - }, - [NS.MATHML]: { - [$.MI]: true, - [$.MO]: true, - [$.MN]: true, - [$.MS]: true, - [$.MTEXT]: true, - [$.ANNOTATION_XML]: true - }, - [NS.SVG]: { - [$.TITLE]: true, - [$.FOREIGN_OBJECT]: true, - [$.DESC]: true - } -}; - - -/***/ }), - -/***/ 4736: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const UNDEFINED_CODE_POINTS = [ - 0xfffe, - 0xffff, - 0x1fffe, - 0x1ffff, - 0x2fffe, - 0x2ffff, - 0x3fffe, - 0x3ffff, - 0x4fffe, - 0x4ffff, - 0x5fffe, - 0x5ffff, - 0x6fffe, - 0x6ffff, - 0x7fffe, - 0x7ffff, - 0x8fffe, - 0x8ffff, - 0x9fffe, - 0x9ffff, - 0xafffe, - 0xaffff, - 0xbfffe, - 0xbffff, - 0xcfffe, - 0xcffff, 
- 0xdfffe, - 0xdffff, - 0xefffe, - 0xeffff, - 0xffffe, - 0xfffff, - 0x10fffe, - 0x10ffff -]; - -exports.REPLACEMENT_CHARACTER = '\uFFFD'; - -exports.CODE_POINTS = { - EOF: -1, - NULL: 0x00, - TABULATION: 0x09, - CARRIAGE_RETURN: 0x0d, - LINE_FEED: 0x0a, - FORM_FEED: 0x0c, - SPACE: 0x20, - EXCLAMATION_MARK: 0x21, - QUOTATION_MARK: 0x22, - NUMBER_SIGN: 0x23, - AMPERSAND: 0x26, - APOSTROPHE: 0x27, - HYPHEN_MINUS: 0x2d, - SOLIDUS: 0x2f, - DIGIT_0: 0x30, - DIGIT_9: 0x39, - SEMICOLON: 0x3b, - LESS_THAN_SIGN: 0x3c, - EQUALS_SIGN: 0x3d, - GREATER_THAN_SIGN: 0x3e, - QUESTION_MARK: 0x3f, - LATIN_CAPITAL_A: 0x41, - LATIN_CAPITAL_F: 0x46, - LATIN_CAPITAL_X: 0x58, - LATIN_CAPITAL_Z: 0x5a, - RIGHT_SQUARE_BRACKET: 0x5d, - GRAVE_ACCENT: 0x60, - LATIN_SMALL_A: 0x61, - LATIN_SMALL_F: 0x66, - LATIN_SMALL_X: 0x78, - LATIN_SMALL_Z: 0x7a, - REPLACEMENT_CHARACTER: 0xfffd -}; - -exports.CODE_POINT_SEQUENCES = { - DASH_DASH_STRING: [0x2d, 0x2d], //-- - DOCTYPE_STRING: [0x44, 0x4f, 0x43, 0x54, 0x59, 0x50, 0x45], //DOCTYPE - CDATA_START_STRING: [0x5b, 0x43, 0x44, 0x41, 0x54, 0x41, 0x5b], //[CDATA[ - SCRIPT_STRING: [0x73, 0x63, 0x72, 0x69, 0x70, 0x74], //script - PUBLIC_STRING: [0x50, 0x55, 0x42, 0x4c, 0x49, 0x43], //PUBLIC - SYSTEM_STRING: [0x53, 0x59, 0x53, 0x54, 0x45, 0x4d] //SYSTEM -}; - -//Surrogates -exports.isSurrogate = function(cp) { - return cp >= 0xd800 && cp <= 0xdfff; -}; - -exports.isSurrogatePair = function(cp) { - return cp >= 0xdc00 && cp <= 0xdfff; -}; - -exports.getSurrogatePairCodePoint = function(cp1, cp2) { - return (cp1 - 0xd800) * 0x400 + 0x2400 + cp2; -}; - -//NOTE: excluding NULL and ASCII whitespace -exports.isControlCodePoint = function(cp) { - return ( - (cp !== 0x20 && cp !== 0x0a && cp !== 0x0d && cp !== 0x09 && cp !== 0x0c && cp >= 0x01 && cp <= 0x1f) || - (cp >= 0x7f && cp <= 0x9f) - ); -}; - -exports.isUndefinedCodePoint = function(cp) { - return (cp >= 0xfdd0 && cp <= 0xfdef) || UNDEFINED_CODE_POINTS.indexOf(cp) > -1; -}; - - -/***/ }), - -/***/ 45686: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const Mixin = __nccwpck_require__(2830); - -class ErrorReportingMixinBase extends Mixin { - constructor(host, opts) { - super(host); - - this.posTracker = null; - this.onParseError = opts.onParseError; - } - - _setErrorLocation(err) { - err.startLine = err.endLine = this.posTracker.line; - err.startCol = err.endCol = this.posTracker.col; - err.startOffset = err.endOffset = this.posTracker.offset; - } - - _reportError(code) { - const err = { - code: code, - startLine: -1, - startCol: -1, - startOffset: -1, - endLine: -1, - endCol: -1, - endOffset: -1 - }; - - this._setErrorLocation(err); - this.onParseError(err); - } - - _getOverriddenMethods(mxn) { - return { - _err(code) { - mxn._reportError(code); - } - }; - } -} - -module.exports = ErrorReportingMixinBase; - - -/***/ }), - -/***/ 60393: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const ErrorReportingMixinBase = __nccwpck_require__(45686); -const ErrorReportingTokenizerMixin = __nccwpck_require__(14703); -const LocationInfoTokenizerMixin = __nccwpck_require__(29817); -const Mixin = __nccwpck_require__(2830); - -class ErrorReportingParserMixin extends ErrorReportingMixinBase { - constructor(parser, opts) { - super(parser, opts); - - this.opts = opts; - this.ctLoc = null; - this.locBeforeToken = false; - } - - _setErrorLocation(err) { - if (this.ctLoc) { - err.startLine = this.ctLoc.startLine; - err.startCol = this.ctLoc.startCol; - err.startOffset 
= this.ctLoc.startOffset; - - err.endLine = this.locBeforeToken ? this.ctLoc.startLine : this.ctLoc.endLine; - err.endCol = this.locBeforeToken ? this.ctLoc.startCol : this.ctLoc.endCol; - err.endOffset = this.locBeforeToken ? this.ctLoc.startOffset : this.ctLoc.endOffset; - } - } - - _getOverriddenMethods(mxn, orig) { - return { - _bootstrap(document, fragmentContext) { - orig._bootstrap.call(this, document, fragmentContext); - - Mixin.install(this.tokenizer, ErrorReportingTokenizerMixin, mxn.opts); - Mixin.install(this.tokenizer, LocationInfoTokenizerMixin); - }, - - _processInputToken(token) { - mxn.ctLoc = token.location; - - orig._processInputToken.call(this, token); - }, - - _err(code, options) { - mxn.locBeforeToken = options && options.beforeToken; - mxn._reportError(code); - } - }; - } -} - -module.exports = ErrorReportingParserMixin; - - -/***/ }), - -/***/ 96573: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const ErrorReportingMixinBase = __nccwpck_require__(45686); -const PositionTrackingPreprocessorMixin = __nccwpck_require__(3127); -const Mixin = __nccwpck_require__(2830); - -class ErrorReportingPreprocessorMixin extends ErrorReportingMixinBase { - constructor(preprocessor, opts) { - super(preprocessor, opts); - - this.posTracker = Mixin.install(preprocessor, PositionTrackingPreprocessorMixin); - this.lastErrOffset = -1; - } - - _reportError(code) { - //NOTE: avoid reporting error twice on advance/retreat - if (this.lastErrOffset !== this.posTracker.offset) { - this.lastErrOffset = this.posTracker.offset; - super._reportError(code); - } - } -} - -module.exports = ErrorReportingPreprocessorMixin; - - -/***/ }), - -/***/ 14703: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const ErrorReportingMixinBase = __nccwpck_require__(45686); -const ErrorReportingPreprocessorMixin = __nccwpck_require__(96573); -const Mixin = __nccwpck_require__(2830); - -class ErrorReportingTokenizerMixin extends ErrorReportingMixinBase { - constructor(tokenizer, opts) { - super(tokenizer, opts); - - const preprocessorMixin = Mixin.install(tokenizer.preprocessor, ErrorReportingPreprocessorMixin, opts); - - this.posTracker = preprocessorMixin.posTracker; - } -} - -module.exports = ErrorReportingTokenizerMixin; - - -/***/ }), - -/***/ 78020: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const Mixin = __nccwpck_require__(2830); - -class LocationInfoOpenElementStackMixin extends Mixin { - constructor(stack, opts) { - super(stack); - - this.onItemPop = opts.onItemPop; - } - - _getOverriddenMethods(mxn, orig) { - return { - pop() { - mxn.onItemPop(this.current); - orig.pop.call(this); - }, - - popAllUpToHtmlElement() { - for (let i = this.stackTop; i > 0; i--) { - mxn.onItemPop(this.items[i]); - } - - orig.popAllUpToHtmlElement.call(this); - }, - - remove(element) { - mxn.onItemPop(this.current); - orig.remove.call(this, element); - } - }; - } -} - -module.exports = LocationInfoOpenElementStackMixin; - - -/***/ }), - -/***/ 703: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const Mixin = __nccwpck_require__(2830); -const Tokenizer = __nccwpck_require__(66480); -const LocationInfoTokenizerMixin = __nccwpck_require__(29817); -const LocationInfoOpenElementStackMixin = __nccwpck_require__(78020); -const HTML = __nccwpck_require__(49578); - -//Aliases -const $ = HTML.TAG_NAMES; - -class LocationInfoParserMixin extends Mixin { - 
constructor(parser) { - super(parser); - - this.parser = parser; - this.treeAdapter = this.parser.treeAdapter; - this.posTracker = null; - this.lastStartTagToken = null; - this.lastFosterParentingLocation = null; - this.currentToken = null; - } - - _setStartLocation(element) { - let loc = null; - - if (this.lastStartTagToken) { - loc = Object.assign({}, this.lastStartTagToken.location); - loc.startTag = this.lastStartTagToken.location; - } - - this.treeAdapter.setNodeSourceCodeLocation(element, loc); - } - - _setEndLocation(element, closingToken) { - const loc = this.treeAdapter.getNodeSourceCodeLocation(element); - - if (loc) { - if (closingToken.location) { - const ctLoc = closingToken.location; - const tn = this.treeAdapter.getTagName(element); - - // NOTE: For cases like
<p> <p> </p> - First 'p' closes without a closing - // tag and for cases like <td> <p> </td>
- 'p' closes without a closing tag. - const isClosingEndTag = closingToken.type === Tokenizer.END_TAG_TOKEN && tn === closingToken.tagName; - - if (isClosingEndTag) { - loc.endTag = Object.assign({}, ctLoc); - loc.endLine = ctLoc.endLine; - loc.endCol = ctLoc.endCol; - loc.endOffset = ctLoc.endOffset; - } else { - loc.endLine = ctLoc.startLine; - loc.endCol = ctLoc.startCol; - loc.endOffset = ctLoc.startOffset; - } - } - } - } - - _getOverriddenMethods(mxn, orig) { - return { - _bootstrap(document, fragmentContext) { - orig._bootstrap.call(this, document, fragmentContext); - - mxn.lastStartTagToken = null; - mxn.lastFosterParentingLocation = null; - mxn.currentToken = null; - - const tokenizerMixin = Mixin.install(this.tokenizer, LocationInfoTokenizerMixin); - - mxn.posTracker = tokenizerMixin.posTracker; - - Mixin.install(this.openElements, LocationInfoOpenElementStackMixin, { - onItemPop: function(element) { - mxn._setEndLocation(element, mxn.currentToken); - } - }); - }, - - _runParsingLoop(scriptHandler) { - orig._runParsingLoop.call(this, scriptHandler); - - // NOTE: generate location info for elements - // that remains on open element stack - for (let i = this.openElements.stackTop; i >= 0; i--) { - mxn._setEndLocation(this.openElements.items[i], mxn.currentToken); - } - }, - - //Token processing - _processTokenInForeignContent(token) { - mxn.currentToken = token; - orig._processTokenInForeignContent.call(this, token); - }, - - _processToken(token) { - mxn.currentToken = token; - orig._processToken.call(this, token); - - //NOTE: and are never popped from the stack, so we need to updated - //their end location explicitly. - const requireExplicitUpdate = - token.type === Tokenizer.END_TAG_TOKEN && - (token.tagName === $.HTML || (token.tagName === $.BODY && this.openElements.hasInScope($.BODY))); - - if (requireExplicitUpdate) { - for (let i = this.openElements.stackTop; i >= 0; i--) { - const element = this.openElements.items[i]; - - if (this.treeAdapter.getTagName(element) === token.tagName) { - mxn._setEndLocation(element, token); - break; - } - } - } - }, - - //Doctype - _setDocumentType(token) { - orig._setDocumentType.call(this, token); - - const documentChildren = this.treeAdapter.getChildNodes(this.document); - const cnLength = documentChildren.length; - - for (let i = 0; i < cnLength; i++) { - const node = documentChildren[i]; - - if (this.treeAdapter.isDocumentTypeNode(node)) { - this.treeAdapter.setNodeSourceCodeLocation(node, token.location); - break; - } - } - }, - - //Elements - _attachElementToTree(element) { - //NOTE: _attachElementToTree is called from _appendElement, _insertElement and _insertTemplate methods. - //So we will use token location stored in this methods for the element. 
- mxn._setStartLocation(element); - mxn.lastStartTagToken = null; - orig._attachElementToTree.call(this, element); - }, - - _appendElement(token, namespaceURI) { - mxn.lastStartTagToken = token; - orig._appendElement.call(this, token, namespaceURI); - }, - - _insertElement(token, namespaceURI) { - mxn.lastStartTagToken = token; - orig._insertElement.call(this, token, namespaceURI); - }, - - _insertTemplate(token) { - mxn.lastStartTagToken = token; - orig._insertTemplate.call(this, token); - - const tmplContent = this.treeAdapter.getTemplateContent(this.openElements.current); - - this.treeAdapter.setNodeSourceCodeLocation(tmplContent, null); - }, - - _insertFakeRootElement() { - orig._insertFakeRootElement.call(this); - this.treeAdapter.setNodeSourceCodeLocation(this.openElements.current, null); - }, - - //Comments - _appendCommentNode(token, parent) { - orig._appendCommentNode.call(this, token, parent); - - const children = this.treeAdapter.getChildNodes(parent); - const commentNode = children[children.length - 1]; - - this.treeAdapter.setNodeSourceCodeLocation(commentNode, token.location); - }, - - //Text - _findFosterParentingLocation() { - //NOTE: store last foster parenting location, so we will be able to find inserted text - //in case of foster parenting - mxn.lastFosterParentingLocation = orig._findFosterParentingLocation.call(this); - - return mxn.lastFosterParentingLocation; - }, - - _insertCharacters(token) { - orig._insertCharacters.call(this, token); - - const hasFosterParent = this._shouldFosterParentOnInsertion(); - - const parent = - (hasFosterParent && mxn.lastFosterParentingLocation.parent) || - this.openElements.currentTmplContent || - this.openElements.current; - - const siblings = this.treeAdapter.getChildNodes(parent); - - const textNodeIdx = - hasFosterParent && mxn.lastFosterParentingLocation.beforeElement - ? 
siblings.indexOf(mxn.lastFosterParentingLocation.beforeElement) - 1 - : siblings.length - 1; - - const textNode = siblings[textNodeIdx]; - - //NOTE: if we have location assigned by another token, then just update end position - const tnLoc = this.treeAdapter.getNodeSourceCodeLocation(textNode); - - if (tnLoc) { - tnLoc.endLine = token.location.endLine; - tnLoc.endCol = token.location.endCol; - tnLoc.endOffset = token.location.endOffset; - } else { - this.treeAdapter.setNodeSourceCodeLocation(textNode, token.location); - } - } - }; - } -} - -module.exports = LocationInfoParserMixin; - - -/***/ }), - -/***/ 29817: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const Mixin = __nccwpck_require__(2830); -const Tokenizer = __nccwpck_require__(66480); -const PositionTrackingPreprocessorMixin = __nccwpck_require__(3127); - -class LocationInfoTokenizerMixin extends Mixin { - constructor(tokenizer) { - super(tokenizer); - - this.tokenizer = tokenizer; - this.posTracker = Mixin.install(tokenizer.preprocessor, PositionTrackingPreprocessorMixin); - this.currentAttrLocation = null; - this.ctLoc = null; - } - - _getCurrentLocation() { - return { - startLine: this.posTracker.line, - startCol: this.posTracker.col, - startOffset: this.posTracker.offset, - endLine: -1, - endCol: -1, - endOffset: -1 - }; - } - - _attachCurrentAttrLocationInfo() { - this.currentAttrLocation.endLine = this.posTracker.line; - this.currentAttrLocation.endCol = this.posTracker.col; - this.currentAttrLocation.endOffset = this.posTracker.offset; - - const currentToken = this.tokenizer.currentToken; - const currentAttr = this.tokenizer.currentAttr; - - if (!currentToken.location.attrs) { - currentToken.location.attrs = Object.create(null); - } - - currentToken.location.attrs[currentAttr.name] = this.currentAttrLocation; - } - - _getOverriddenMethods(mxn, orig) { - const methods = { - _createStartTagToken() { - orig._createStartTagToken.call(this); - this.currentToken.location = mxn.ctLoc; - }, - - _createEndTagToken() { - orig._createEndTagToken.call(this); - this.currentToken.location = mxn.ctLoc; - }, - - _createCommentToken() { - orig._createCommentToken.call(this); - this.currentToken.location = mxn.ctLoc; - }, - - _createDoctypeToken(initialName) { - orig._createDoctypeToken.call(this, initialName); - this.currentToken.location = mxn.ctLoc; - }, - - _createCharacterToken(type, ch) { - orig._createCharacterToken.call(this, type, ch); - this.currentCharacterToken.location = mxn.ctLoc; - }, - - _createEOFToken() { - orig._createEOFToken.call(this); - this.currentToken.location = mxn._getCurrentLocation(); - }, - - _createAttr(attrNameFirstCh) { - orig._createAttr.call(this, attrNameFirstCh); - mxn.currentAttrLocation = mxn._getCurrentLocation(); - }, - - _leaveAttrName(toState) { - orig._leaveAttrName.call(this, toState); - mxn._attachCurrentAttrLocationInfo(); - }, - - _leaveAttrValue(toState) { - orig._leaveAttrValue.call(this, toState); - mxn._attachCurrentAttrLocationInfo(); - }, - - _emitCurrentToken() { - const ctLoc = this.currentToken.location; - - //NOTE: if we have pending character token make it's end location equal to the - //current token's start location. 
- if (this.currentCharacterToken) { - this.currentCharacterToken.location.endLine = ctLoc.startLine; - this.currentCharacterToken.location.endCol = ctLoc.startCol; - this.currentCharacterToken.location.endOffset = ctLoc.startOffset; - } - - if (this.currentToken.type === Tokenizer.EOF_TOKEN) { - ctLoc.endLine = ctLoc.startLine; - ctLoc.endCol = ctLoc.startCol; - ctLoc.endOffset = ctLoc.startOffset; - } else { - ctLoc.endLine = mxn.posTracker.line; - ctLoc.endCol = mxn.posTracker.col + 1; - ctLoc.endOffset = mxn.posTracker.offset + 1; - } - - orig._emitCurrentToken.call(this); - }, - - _emitCurrentCharacterToken() { - const ctLoc = this.currentCharacterToken && this.currentCharacterToken.location; - - //NOTE: if we have character token and it's location wasn't set in the _emitCurrentToken(), - //then set it's location at the current preprocessor position. - //We don't need to increment preprocessor position, since character token - //emission is always forced by the start of the next character token here. - //So, we already have advanced position. - if (ctLoc && ctLoc.endOffset === -1) { - ctLoc.endLine = mxn.posTracker.line; - ctLoc.endCol = mxn.posTracker.col; - ctLoc.endOffset = mxn.posTracker.offset; - } - - orig._emitCurrentCharacterToken.call(this); - } - }; - - //NOTE: patch initial states for each mode to obtain token start position - Object.keys(Tokenizer.MODE).forEach(modeName => { - const state = Tokenizer.MODE[modeName]; - - methods[state] = function(cp) { - mxn.ctLoc = mxn._getCurrentLocation(); - orig[state].call(this, cp); - }; - }); - - return methods; - } -} - -module.exports = LocationInfoTokenizerMixin; - - -/***/ }), - -/***/ 3127: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const Mixin = __nccwpck_require__(2830); - -class PositionTrackingPreprocessorMixin extends Mixin { - constructor(preprocessor) { - super(preprocessor); - - this.preprocessor = preprocessor; - this.isEol = false; - this.lineStartPos = 0; - this.droppedBufferSize = 0; - - this.offset = 0; - this.col = 0; - this.line = 1; - } - - _getOverriddenMethods(mxn, orig) { - return { - advance() { - const pos = this.pos + 1; - const ch = this.html[pos]; - - //NOTE: LF should be in the last column of the line - if (mxn.isEol) { - mxn.isEol = false; - mxn.line++; - mxn.lineStartPos = pos; - } - - if (ch === '\n' || (ch === '\r' && this.html[pos + 1] !== '\n')) { - mxn.isEol = true; - } - - mxn.col = pos - mxn.lineStartPos + 1; - mxn.offset = mxn.droppedBufferSize + pos; - - return orig.advance.call(this); - }, - - retreat() { - orig.retreat.call(this); - - mxn.isEol = false; - mxn.col = this.pos - mxn.lineStartPos + 1; - }, - - dropParsedChunk() { - const prevPos = this.pos; - - orig.dropParsedChunk.call(this); - - const reduction = prevPos - this.pos; - - mxn.lineStartPos -= reduction; - mxn.droppedBufferSize += reduction; - mxn.offset = mxn.droppedBufferSize + this.pos; - } - }; - } -} - -module.exports = PositionTrackingPreprocessorMixin; - - -/***/ }), - -/***/ 88933: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -const Parser = __nccwpck_require__(66225); -const Serializer = __nccwpck_require__(14786); - -// Shorthands -exports.parse = function parse(html, options) { - const parser = new Parser(options); - - return parser.parse(html); -}; - -exports.parseFragment = function parseFragment(fragmentContext, html, options) { - if (typeof fragmentContext === 'string') { - options = html; - html = fragmentContext; - 
fragmentContext = null; - } - - const parser = new Parser(options); - - return parser.parseFragment(html, fragmentContext); -}; - -exports.serialize = function(node, options) { - const serializer = new Serializer(node, options); - - return serializer.serialize(); -}; - - -/***/ }), - -/***/ 9545: -/***/ ((module) => { - -"use strict"; - - -//Const -const NOAH_ARK_CAPACITY = 3; - -//List of formatting elements -class FormattingElementList { - constructor(treeAdapter) { - this.length = 0; - this.entries = []; - this.treeAdapter = treeAdapter; - this.bookmark = null; - } - - //Noah Ark's condition - //OPTIMIZATION: at first we try to find possible candidates for exclusion using - //lightweight heuristics without thorough attributes check. - _getNoahArkConditionCandidates(newElement) { - const candidates = []; - - if (this.length >= NOAH_ARK_CAPACITY) { - const neAttrsLength = this.treeAdapter.getAttrList(newElement).length; - const neTagName = this.treeAdapter.getTagName(newElement); - const neNamespaceURI = this.treeAdapter.getNamespaceURI(newElement); - - for (let i = this.length - 1; i >= 0; i--) { - const entry = this.entries[i]; - - if (entry.type === FormattingElementList.MARKER_ENTRY) { - break; - } - - const element = entry.element; - const elementAttrs = this.treeAdapter.getAttrList(element); - - const isCandidate = - this.treeAdapter.getTagName(element) === neTagName && - this.treeAdapter.getNamespaceURI(element) === neNamespaceURI && - elementAttrs.length === neAttrsLength; - - if (isCandidate) { - candidates.push({ idx: i, attrs: elementAttrs }); - } - } - } - - return candidates.length < NOAH_ARK_CAPACITY ? [] : candidates; - } - - _ensureNoahArkCondition(newElement) { - const candidates = this._getNoahArkConditionCandidates(newElement); - let cLength = candidates.length; - - if (cLength) { - const neAttrs = this.treeAdapter.getAttrList(newElement); - const neAttrsLength = neAttrs.length; - const neAttrsMap = Object.create(null); - - //NOTE: build attrs map for the new element so we can perform fast lookups - for (let i = 0; i < neAttrsLength; i++) { - const neAttr = neAttrs[i]; - - neAttrsMap[neAttr.name] = neAttr.value; - } - - for (let i = 0; i < neAttrsLength; i++) { - for (let j = 0; j < cLength; j++) { - const cAttr = candidates[j].attrs[i]; - - if (neAttrsMap[cAttr.name] !== cAttr.value) { - candidates.splice(j, 1); - cLength--; - } - - if (candidates.length < NOAH_ARK_CAPACITY) { - return; - } - } - } - - //NOTE: remove bottommost candidates until Noah's Ark condition will not be met - for (let i = cLength - 1; i >= NOAH_ARK_CAPACITY - 1; i--) { - this.entries.splice(candidates[i].idx, 1); - this.length--; - } - } - } - - //Mutations - insertMarker() { - this.entries.push({ type: FormattingElementList.MARKER_ENTRY }); - this.length++; - } - - pushElement(element, token) { - this._ensureNoahArkCondition(element); - - this.entries.push({ - type: FormattingElementList.ELEMENT_ENTRY, - element: element, - token: token - }); - - this.length++; - } - - insertElementAfterBookmark(element, token) { - let bookmarkIdx = this.length - 1; - - for (; bookmarkIdx >= 0; bookmarkIdx--) { - if (this.entries[bookmarkIdx] === this.bookmark) { - break; - } - } - - this.entries.splice(bookmarkIdx + 1, 0, { - type: FormattingElementList.ELEMENT_ENTRY, - element: element, - token: token - }); - - this.length++; - } - - removeEntry(entry) { - for (let i = this.length - 1; i >= 0; i--) { - if (this.entries[i] === entry) { - this.entries.splice(i, 1); - this.length--; - break; - } - } - } - - 
clearToLastMarker() { - while (this.length) { - const entry = this.entries.pop(); - - this.length--; - - if (entry.type === FormattingElementList.MARKER_ENTRY) { - break; - } - } - } - - //Search - getElementEntryInScopeWithTagName(tagName) { - for (let i = this.length - 1; i >= 0; i--) { - const entry = this.entries[i]; - - if (entry.type === FormattingElementList.MARKER_ENTRY) { - return null; - } - - if (this.treeAdapter.getTagName(entry.element) === tagName) { - return entry; - } - } - - return null; - } - - getElementEntry(element) { - for (let i = this.length - 1; i >= 0; i--) { - const entry = this.entries[i]; - - if (entry.type === FormattingElementList.ELEMENT_ENTRY && entry.element === element) { - return entry; - } - } - - return null; - } -} - -//Entry types -FormattingElementList.MARKER_ENTRY = 'MARKER_ENTRY'; -FormattingElementList.ELEMENT_ENTRY = 'ELEMENT_ENTRY'; - -module.exports = FormattingElementList; - - -/***/ }), - -/***/ 66225: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const Tokenizer = __nccwpck_require__(66480); -const OpenElementStack = __nccwpck_require__(31827); -const FormattingElementList = __nccwpck_require__(9545); -const LocationInfoParserMixin = __nccwpck_require__(703); -const ErrorReportingParserMixin = __nccwpck_require__(60393); -const Mixin = __nccwpck_require__(2830); -const defaultTreeAdapter = __nccwpck_require__(37269); -const mergeOptions = __nccwpck_require__(57382); -const doctype = __nccwpck_require__(94281); -const foreignContent = __nccwpck_require__(24320); -const ERR = __nccwpck_require__(65510); -const unicode = __nccwpck_require__(4736); -const HTML = __nccwpck_require__(49578); - -//Aliases -const $ = HTML.TAG_NAMES; -const NS = HTML.NAMESPACES; -const ATTRS = HTML.ATTRS; - -const DEFAULT_OPTIONS = { - scriptingEnabled: true, - sourceCodeLocationInfo: false, - onParseError: null, - treeAdapter: defaultTreeAdapter -}; - -//Misc constants -const HIDDEN_INPUT_TYPE = 'hidden'; - -//Adoption agency loops iteration count -const AA_OUTER_LOOP_ITER = 8; -const AA_INNER_LOOP_ITER = 3; - -//Insertion modes -const INITIAL_MODE = 'INITIAL_MODE'; -const BEFORE_HTML_MODE = 'BEFORE_HTML_MODE'; -const BEFORE_HEAD_MODE = 'BEFORE_HEAD_MODE'; -const IN_HEAD_MODE = 'IN_HEAD_MODE'; -const IN_HEAD_NO_SCRIPT_MODE = 'IN_HEAD_NO_SCRIPT_MODE'; -const AFTER_HEAD_MODE = 'AFTER_HEAD_MODE'; -const IN_BODY_MODE = 'IN_BODY_MODE'; -const TEXT_MODE = 'TEXT_MODE'; -const IN_TABLE_MODE = 'IN_TABLE_MODE'; -const IN_TABLE_TEXT_MODE = 'IN_TABLE_TEXT_MODE'; -const IN_CAPTION_MODE = 'IN_CAPTION_MODE'; -const IN_COLUMN_GROUP_MODE = 'IN_COLUMN_GROUP_MODE'; -const IN_TABLE_BODY_MODE = 'IN_TABLE_BODY_MODE'; -const IN_ROW_MODE = 'IN_ROW_MODE'; -const IN_CELL_MODE = 'IN_CELL_MODE'; -const IN_SELECT_MODE = 'IN_SELECT_MODE'; -const IN_SELECT_IN_TABLE_MODE = 'IN_SELECT_IN_TABLE_MODE'; -const IN_TEMPLATE_MODE = 'IN_TEMPLATE_MODE'; -const AFTER_BODY_MODE = 'AFTER_BODY_MODE'; -const IN_FRAMESET_MODE = 'IN_FRAMESET_MODE'; -const AFTER_FRAMESET_MODE = 'AFTER_FRAMESET_MODE'; -const AFTER_AFTER_BODY_MODE = 'AFTER_AFTER_BODY_MODE'; -const AFTER_AFTER_FRAMESET_MODE = 'AFTER_AFTER_FRAMESET_MODE'; - -//Insertion mode reset map -const INSERTION_MODE_RESET_MAP = { - [$.TR]: IN_ROW_MODE, - [$.TBODY]: IN_TABLE_BODY_MODE, - [$.THEAD]: IN_TABLE_BODY_MODE, - [$.TFOOT]: IN_TABLE_BODY_MODE, - [$.CAPTION]: IN_CAPTION_MODE, - [$.COLGROUP]: IN_COLUMN_GROUP_MODE, - [$.TABLE]: IN_TABLE_MODE, - [$.BODY]: IN_BODY_MODE, - [$.FRAMESET]: IN_FRAMESET_MODE -}; - 
-//Template insertion mode switch map -const TEMPLATE_INSERTION_MODE_SWITCH_MAP = { - [$.CAPTION]: IN_TABLE_MODE, - [$.COLGROUP]: IN_TABLE_MODE, - [$.TBODY]: IN_TABLE_MODE, - [$.TFOOT]: IN_TABLE_MODE, - [$.THEAD]: IN_TABLE_MODE, - [$.COL]: IN_COLUMN_GROUP_MODE, - [$.TR]: IN_TABLE_BODY_MODE, - [$.TD]: IN_ROW_MODE, - [$.TH]: IN_ROW_MODE -}; - -//Token handlers map for insertion modes -const TOKEN_HANDLERS = { - [INITIAL_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: tokenInInitialMode, - [Tokenizer.NULL_CHARACTER_TOKEN]: tokenInInitialMode, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: doctypeInInitialMode, - [Tokenizer.START_TAG_TOKEN]: tokenInInitialMode, - [Tokenizer.END_TAG_TOKEN]: tokenInInitialMode, - [Tokenizer.EOF_TOKEN]: tokenInInitialMode - }, - [BEFORE_HTML_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: tokenBeforeHtml, - [Tokenizer.NULL_CHARACTER_TOKEN]: tokenBeforeHtml, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagBeforeHtml, - [Tokenizer.END_TAG_TOKEN]: endTagBeforeHtml, - [Tokenizer.EOF_TOKEN]: tokenBeforeHtml - }, - [BEFORE_HEAD_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: tokenBeforeHead, - [Tokenizer.NULL_CHARACTER_TOKEN]: tokenBeforeHead, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: misplacedDoctype, - [Tokenizer.START_TAG_TOKEN]: startTagBeforeHead, - [Tokenizer.END_TAG_TOKEN]: endTagBeforeHead, - [Tokenizer.EOF_TOKEN]: tokenBeforeHead - }, - [IN_HEAD_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: tokenInHead, - [Tokenizer.NULL_CHARACTER_TOKEN]: tokenInHead, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: misplacedDoctype, - [Tokenizer.START_TAG_TOKEN]: startTagInHead, - [Tokenizer.END_TAG_TOKEN]: endTagInHead, - [Tokenizer.EOF_TOKEN]: tokenInHead - }, - [IN_HEAD_NO_SCRIPT_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: tokenInHeadNoScript, - [Tokenizer.NULL_CHARACTER_TOKEN]: tokenInHeadNoScript, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: misplacedDoctype, - [Tokenizer.START_TAG_TOKEN]: startTagInHeadNoScript, - [Tokenizer.END_TAG_TOKEN]: endTagInHeadNoScript, - [Tokenizer.EOF_TOKEN]: tokenInHeadNoScript - }, - [AFTER_HEAD_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: tokenAfterHead, - [Tokenizer.NULL_CHARACTER_TOKEN]: tokenAfterHead, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: misplacedDoctype, - [Tokenizer.START_TAG_TOKEN]: startTagAfterHead, - [Tokenizer.END_TAG_TOKEN]: endTagAfterHead, - [Tokenizer.EOF_TOKEN]: tokenAfterHead - }, - [IN_BODY_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: characterInBody, - [Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: whitespaceCharacterInBody, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInBody, - [Tokenizer.END_TAG_TOKEN]: endTagInBody, - [Tokenizer.EOF_TOKEN]: eofInBody - }, - [TEXT_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.NULL_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.COMMENT_TOKEN]: ignoreToken, - 
[Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: ignoreToken, - [Tokenizer.END_TAG_TOKEN]: endTagInText, - [Tokenizer.EOF_TOKEN]: eofInText - }, - [IN_TABLE_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: characterInTable, - [Tokenizer.NULL_CHARACTER_TOKEN]: characterInTable, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: characterInTable, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInTable, - [Tokenizer.END_TAG_TOKEN]: endTagInTable, - [Tokenizer.EOF_TOKEN]: eofInBody - }, - [IN_TABLE_TEXT_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: characterInTableText, - [Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: whitespaceCharacterInTableText, - [Tokenizer.COMMENT_TOKEN]: tokenInTableText, - [Tokenizer.DOCTYPE_TOKEN]: tokenInTableText, - [Tokenizer.START_TAG_TOKEN]: tokenInTableText, - [Tokenizer.END_TAG_TOKEN]: tokenInTableText, - [Tokenizer.EOF_TOKEN]: tokenInTableText - }, - [IN_CAPTION_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: characterInBody, - [Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: whitespaceCharacterInBody, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInCaption, - [Tokenizer.END_TAG_TOKEN]: endTagInCaption, - [Tokenizer.EOF_TOKEN]: eofInBody - }, - [IN_COLUMN_GROUP_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: tokenInColumnGroup, - [Tokenizer.NULL_CHARACTER_TOKEN]: tokenInColumnGroup, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInColumnGroup, - [Tokenizer.END_TAG_TOKEN]: endTagInColumnGroup, - [Tokenizer.EOF_TOKEN]: eofInBody - }, - [IN_TABLE_BODY_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: characterInTable, - [Tokenizer.NULL_CHARACTER_TOKEN]: characterInTable, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: characterInTable, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInTableBody, - [Tokenizer.END_TAG_TOKEN]: endTagInTableBody, - [Tokenizer.EOF_TOKEN]: eofInBody - }, - [IN_ROW_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: characterInTable, - [Tokenizer.NULL_CHARACTER_TOKEN]: characterInTable, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: characterInTable, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInRow, - [Tokenizer.END_TAG_TOKEN]: endTagInRow, - [Tokenizer.EOF_TOKEN]: eofInBody - }, - [IN_CELL_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: characterInBody, - [Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: whitespaceCharacterInBody, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInCell, - [Tokenizer.END_TAG_TOKEN]: endTagInCell, - [Tokenizer.EOF_TOKEN]: eofInBody - }, - [IN_SELECT_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInSelect, - [Tokenizer.END_TAG_TOKEN]: endTagInSelect, - [Tokenizer.EOF_TOKEN]: eofInBody - }, - [IN_SELECT_IN_TABLE_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: insertCharacters, - 
[Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInSelectInTable, - [Tokenizer.END_TAG_TOKEN]: endTagInSelectInTable, - [Tokenizer.EOF_TOKEN]: eofInBody - }, - [IN_TEMPLATE_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: characterInBody, - [Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: whitespaceCharacterInBody, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInTemplate, - [Tokenizer.END_TAG_TOKEN]: endTagInTemplate, - [Tokenizer.EOF_TOKEN]: eofInTemplate - }, - [AFTER_BODY_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: tokenAfterBody, - [Tokenizer.NULL_CHARACTER_TOKEN]: tokenAfterBody, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: whitespaceCharacterInBody, - [Tokenizer.COMMENT_TOKEN]: appendCommentToRootHtmlElement, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagAfterBody, - [Tokenizer.END_TAG_TOKEN]: endTagAfterBody, - [Tokenizer.EOF_TOKEN]: stopParsing - }, - [IN_FRAMESET_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagInFrameset, - [Tokenizer.END_TAG_TOKEN]: endTagInFrameset, - [Tokenizer.EOF_TOKEN]: stopParsing - }, - [AFTER_FRAMESET_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: insertCharacters, - [Tokenizer.COMMENT_TOKEN]: appendComment, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagAfterFrameset, - [Tokenizer.END_TAG_TOKEN]: endTagAfterFrameset, - [Tokenizer.EOF_TOKEN]: stopParsing - }, - [AFTER_AFTER_BODY_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: tokenAfterAfterBody, - [Tokenizer.NULL_CHARACTER_TOKEN]: tokenAfterAfterBody, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: whitespaceCharacterInBody, - [Tokenizer.COMMENT_TOKEN]: appendCommentToDocument, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagAfterAfterBody, - [Tokenizer.END_TAG_TOKEN]: tokenAfterAfterBody, - [Tokenizer.EOF_TOKEN]: stopParsing - }, - [AFTER_AFTER_FRAMESET_MODE]: { - [Tokenizer.CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.NULL_CHARACTER_TOKEN]: ignoreToken, - [Tokenizer.WHITESPACE_CHARACTER_TOKEN]: whitespaceCharacterInBody, - [Tokenizer.COMMENT_TOKEN]: appendCommentToDocument, - [Tokenizer.DOCTYPE_TOKEN]: ignoreToken, - [Tokenizer.START_TAG_TOKEN]: startTagAfterAfterFrameset, - [Tokenizer.END_TAG_TOKEN]: ignoreToken, - [Tokenizer.EOF_TOKEN]: stopParsing - } -}; - -//Parser -class Parser { - constructor(options) { - this.options = mergeOptions(DEFAULT_OPTIONS, options); - - this.treeAdapter = this.options.treeAdapter; - this.pendingScript = null; - - if (this.options.sourceCodeLocationInfo) { - Mixin.install(this, LocationInfoParserMixin); - } - - if (this.options.onParseError) { - Mixin.install(this, ErrorReportingParserMixin, { onParseError: this.options.onParseError }); - } - } - - // API - parse(html) { - const document = this.treeAdapter.createDocument(); - - this._bootstrap(document, null); - this.tokenizer.write(html, true); - this._runParsingLoop(null); - - return document; - } - - parseFragment(html, 
fragmentContext) { - //NOTE: use