diff --git a/.github/dependency-review.yml b/.github/dependency-review.yml
deleted file mode 100644
index 527cb3e30..000000000
--- a/.github/dependency-review.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-fail_on_severity: low
-allow_licenses:
-  - 'GPL 3.0'
-  - 'BSD 3 Clause'
-  - 'MIT'
-#deny_licenses:
-#  - "LGPL 2.0"
-#  - "BSD 2 Clause"
diff --git a/README.md b/README.md
index 3501db3b5..085fd83ff 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 This action scans your pull requests for dependency changes and will raise an error if any new dependencies have existing vulnerabilities. The action is supported by an [API endpoint](https://docs.github.com/en/rest/reference/dependency-graph#dependency-review) that diffs the dependencies between any two revisions.
-The action is available for all public repositories, as well as private repositories that have Github Advanced Security licensed.
+The action is available for all public repositories, as well as private repositories that have Github Advanced Security licensed.
 Screen Shot 2022-03-31 at 1 10 51 PM
@@ -25,10 +25,99 @@ jobs:
       - name: 'Checkout Repository'
         uses: actions/checkout@v3
       - name: 'Dependency Review'
-        uses: actions/dependency-review-action@v1
+        uses: actions/dependency-review-action@v2
 ```
-Please keep in mind that you need a GitHub Advanced Security license if you're running this Action on private repos.
+Please keep in mind that you need a GitHub Advanced Security license if you're running this action on private repos.
+
+## Configuration
+You can pass additional options to the Dependency Review
+Action using your workflow file. Here's an example workflow with
+all the possible configurations:
+
+```yaml
+name: 'Dependency Review'
+on: [pull_request]
+permissions:
+  contents: read
+jobs:
+  dependency-review:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Checkout Repository'
+        uses: actions/checkout@v3
+      - name: Dependency Review
+        uses: actions/dependency-review-action@v2
+        with:
+          # Possible values: "critical", "high", "moderate", "low"
+          # fail-on-severity: critical
+          #
+          # You can only include one of these two options: `allow-licenses` and `deny-licenses`
+          #
+          # Possible values: Any `spdx_id` value(s) from https://docs.github.com/en/rest/licenses
+          # allow-licenses: GPL-3.0, BSD-3-Clause, MIT
+          #
+          # Possible values: Any `spdx_id` value(s) from https://docs.github.com/en/rest/licenses
+          # deny-licenses: LGPL-2.0, BSD-2-Clause
+```
+
+### Vulnerability Severity
+
+By default the action will fail on any pull request that contains a
+vulnerable dependency, regardless of the severity level. You can override this behavior by
+using the `fail-on-severity` option, which will cause a failure on any pull requests that introduce vulnerabilities of the specified severity level or higher. The possible values are: `critical`, `high`, `moderate`, or `low`. The
+action defaults to `low`.
+
+This example will only fail on pull requests with `critical` and `high` vulnerabilities:
+
+```yaml
+- name: Dependency Review
+  uses: actions/dependency-review-action@v2
+  with:
+    fail-on-severity: high
+```
+
+### Licenses
+
+You can set the action to fail on pull requests based on the licenses of the dependencies
+they introduce. With `allow-licenses` you can define the list of licenses
+your repository will accept. Alternatively, you can use `deny-licenses` to only
+forbid a subset of licenses.
+
+You can use the [Licenses
+API](https://docs.github.com/en/rest/licenses) to see the full list of
+supported licenses. Use the `spdx_id` field for every license you want
+to filter. A couple of examples:
+
+```yaml
+# only allow MIT-licensed dependents
+- name: Dependency Review
+  uses: actions/dependency-review-action@v2
+  with:
+    allow-licenses: MIT
+```
+
+```yaml
+# Block Apache 1.1 and 2.0 licensed dependents
+- name: Dependency Review
+  uses: actions/dependency-review-action@v2
+  with:
+    deny-licenses: Apache-1.1, Apache-2.0
+```
+
+**Important**
+
+* The action will only accept one of the two parameters; an error will
+be raised if you provide both.
+* By default both parameters are empty (no license checking is
+performed).
+* We don't have license information for all of your dependents. If we
+can't detect the license for a dependency **we will inform you, but the
+action won't fail**.
+
+## Blocking pull requests
+
+The Dependency Review GitHub Action check will only block a pull request from being merged if the repository owner has required the check to pass before merging. For more information, see the [documentation on protected branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#require-status-checks-before-merging).
 ## Getting help
@@ -37,7 +126,7 @@ issue](https://github.com/actions/dependency-review-action/issues/new/choose).
 ## Contributing
-We are grateful for any contributions made to this project.
+We are grateful for any contributions made to this project.
 Please read [CONTRIBUTING.MD](https://github.com/actions/dependency-review-action/blob/main/CONTRIBUTING.md) to get started.
diff --git a/__tests__/config.test.ts b/__tests__/config.test.ts
index 53946f68d..042c5900d 100644
--- a/__tests__/config.test.ts
+++ b/__tests__/config.test.ts
@@ -1,31 +1,53 @@
-import {expect, test} from '@jest/globals'
-import {readConfigFile} from '../src/config'
+import {expect, test, beforeEach} from '@jest/globals'
+import {readConfig} from '../src/config'
-test('reads the config file', async () => {
-  let options = readConfigFile('./__tests__/fixtures/config-allow-sample.yml')
-  expect(options.fail_on_severity).toEqual('critical')
-  expect(options.allow_licenses).toEqual(['BSD', 'GPL 2'])
-})
+// GitHub Action inputs come in the form of environment variables
+// with an INPUT prefix (e.g. INPUT_FAIL-ON-SEVERITY)
+function setInput(input: string, value: string) {
+  process.env[`INPUT_${input.toUpperCase()}`] = value
+}
+
+// We want a clean ENV before each test. We use `delete`
+// since we want `undefined` values and not empty strings.
+function clearInputs() {
+  delete process.env['INPUT_FAIL-ON-SEVERITY']
+  delete process.env['INPUT_ALLOW-LICENSES']
+  delete process.env['INPUT_DENY-LICENSES']
+}
-test('the default config path handles .yml and .yaml', async () => {
-  expect(true).toEqual(true)
+beforeEach(() => {
+  clearInputs()
 })
-test('returns a default config when the config file was not found', async () => {
-  let options = readConfigFile('fixtures/i-dont-exist')
+test('it defaults to low severity', async () => {
+  const options = readConfig()
   expect(options.fail_on_severity).toEqual('low')
-  expect(options.allow_licenses).toEqual(undefined)
 })
-test('it reads config files with empty options', async () => {
-  let options = readConfigFile('./__tests__/fixtures/no-licenses-config.yml')
+test('it reads custom configs', async () => {
+  setInput('fail-on-severity', 'critical')
+  setInput('allow-licenses', ' BSD, GPL 2')
+
+  const options = readConfig()
   expect(options.fail_on_severity).toEqual('critical')
+  expect(options.allow_licenses).toEqual(['BSD', 'GPL 2'])
+})
+
+test('it defaults to empty allow/deny lists ', async () => {
+  const options = readConfig()
+  expect(options.allow_licenses).toEqual(undefined)
   expect(options.deny_licenses).toEqual(undefined)
 })
 test('it raises an error if both an allow and denylist are specified', async () => {
-  expect(() =>
-    readConfigFile('./__tests__/fixtures/conflictive-config.yml')
-  ).toThrow()
+  setInput('allow-licenses', 'MIT')
+  setInput('deny-licenses', 'BSD')
+
+  expect(() => readConfig()).toThrow()
+})
+
+test('it raises an error when given an unknown severity', async () => {
+  setInput('fail-on-severity', 'zombies')
+  expect(() => readConfig()).toThrow()
 })
diff --git a/__tests__/licenses.test.ts b/__tests__/licenses.test.ts
index ead78f597..1e22aa65d 100644
--- a/__tests__/licenses.test.ts
+++ b/__tests__/licenses.test.ts
@@ -48,13 +48,13 @@ let rubyChange: Change = {
 test('it fails if a license outside the allow list is found', async () => {
   const changes: Changes = [npmChange, rubyChange]
-  const invalidChanges = getDeniedLicenseChanges(changes, {allow: ['BSD']})
+  const [invalidChanges, _] = getDeniedLicenseChanges(changes, {allow: ['BSD']})
   expect(invalidChanges[0]).toBe(npmChange)
 })
 test('it fails if a license inside the deny list is found', async () => {
   const changes: Changes = [npmChange, rubyChange]
-  const invalidChanges = getDeniedLicenseChanges(changes, {deny: ['BSD']})
+  const [invalidChanges] = getDeniedLicenseChanges(changes, {deny: ['BSD']})
   expect(invalidChanges[0]).toBe(rubyChange)
 })
@@ -62,7 +62,7 @@ test('it fails if a license inside the deny list is found', async () => {
 // thing we want in the system. Please remove this test after refactoring.
 test('it fails all license checks when allow is provided an empty array', async () => {
   const changes: Changes = [npmChange, rubyChange]
-  let invalidChanges = getDeniedLicenseChanges(changes, {
+  let [invalidChanges, _] = getDeniedLicenseChanges(changes, {
     allow: [],
     deny: ['BSD']
   })
diff --git a/action.yml b/action.yml
index 631d25799..bc65e3002 100644
--- a/action.yml
+++ b/action.yml
@@ -3,9 +3,19 @@ description: 'Prevent the introduction of dependencies with known vulnerabilitie
 author: 'GitHub'
 inputs:
   repo-token:
-    description: 'Token for the repository. Can be passed in using `{{ secrets.GITHUB_TOKEN }}`.'
+    description: Token for the repository. Can be passed in using `{{ secrets.GITHUB_TOKEN }}`.
required: false default: ${{ github.token }} + fail-on-severity: + description: Don't block PRs below this severity. Possible values are `low`, `moderate`, `high`, `critical`. + required: false + default: 'low' + allow-licenses: + description: Comma-separated list of allowed licenses (e.g. "MIT, GPL 3.0, BSD 2 Clause") + required: false + deny-licenses: + description: Comma-separated list of forbidden licenses (e.g. "MIT, GPL 3.0, BSD 2 Clause") + required: false runs: using: 'node16' main: 'dist/index.js' diff --git a/dist/index.js b/dist/index.js index 9ef3ca98b..2ddad4f9a 100644 --- a/dist/index.js +++ b/dist/index.js @@ -78,15 +78,17 @@ exports.getDeniedLicenseChanges = void 0; * we will ignore the deny list. * @param {Change[]} changes The list of changes to filter. * @param { { allow?: string[], deny?: string[]}} licenses An object with `allow`/`deny` keys, each containing a list of licenses. - * @returns {Array, Array 0) { - printLicensesError(licenseErrors, licenses); - core.setFailed('Dependency review detected prohibited licenses.'); - return; - } let filteredChanges = (0, filter_1.filterChangesBySeverity)(minSeverity, changes); for (const change of filteredChanges) { if (change.change_type === 'added' && @@ -192,11 +188,17 @@ function run() { failed = true; } } + let [licenseErrors, unknownLicenses] = (0, licenses_1.getDeniedLicenseChanges)(changes, licenses); + if (licenseErrors.length > 0) { + printLicensesError(licenseErrors, licenses); + printNullLicenses(unknownLicenses); + core.setFailed('Dependency review detected incompatible licenses.'); + } if (failed) { - throw new Error('Dependency review detected vulnerable packages.'); + core.setFailed('Dependency review detected vulnerable packages.'); } else { - core.info(`Dependency review did not detect any vulnerable packages with severity level "${minSeverity}" or above.`); + core.info(`Dependency review did not detect any vulnerable packages with severity level "${minSeverity}" or higher.`); } } catch (error) { @@ -237,18 +239,17 @@ function printLicensesError(changes, licenses) { return; } let { allow = [], deny = [] } = licenses; - core.info('Dependency review detected prohibited licenses.'); - if (allow.length > 0) { - core.info('\nAllowed licenses: ' + allow.join(', ') + '\n'); - } - if (deny.length > 0) { - core.info('\nDenied licenses: ' + deny.join(', ') + '\n'); - } - core.info('The following dependencies have incompatible licenses:\n'); + core.info('\nThe following dependencies have incompatible licenses:\n'); for (const change of changes) { core.info(`${ansi_styles_1.default.bold.open}${change.manifest} » ${change.name}@${change.version}${ansi_styles_1.default.bold.close} – License: ${ansi_styles_1.default.color.red.open}${change.license}${ansi_styles_1.default.color.red.close}`); } } +function printNullLicenses(changes) { + core.info('\nWe could not detect a license for the following dependencies:\n'); + for (const change of changes) { + core.info(`${ansi_styles_1.default.bold.open}${change.manifest} » ${change.name}@${change.version}${ansi_styles_1.default.bold.close}`); + } +} run(); @@ -317,7 +318,7 @@ exports.ConfigurationOptionsSchema = z deny_licenses: z.array(z.string()).default([]) }) .partial() - .refine(obj => !(obj.allow_licenses && obj.deny_licenses), "Can't specify both allow_licenses and deny_licenses"); + .refine(obj => !(obj.allow_licenses && obj.deny_licenses), "Your workflow file has both an allow_licenses list and deny_licenses list, but you can only set one or the other."); 
exports.ChangesSchema = z.array(exports.ChangeSchema); @@ -13698,36 +13699,32 @@ var __importStar = (this && this.__importStar) || function (mod) { __setModuleDefault(result, mod); return result; }; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.readConfigFile = exports.CONFIG_FILEPATH = void 0; -const fs = __importStar(__nccwpck_require__(7147)); -const yaml_1 = __importDefault(__nccwpck_require__(4083)); +exports.readConfig = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const z = __importStar(__nccwpck_require__(3301)); const schemas_1 = __nccwpck_require__(1129); -const path_1 = __importDefault(__nccwpck_require__(1017)); -exports.CONFIG_FILEPATH = './.github/dependency-review.yml'; -function readConfigFile(filePath = exports.CONFIG_FILEPATH) { - // By default we want to fail on all severities and allow all licenses. - const defaultOptions = { - fail_on_severity: 'low' - }; - let data; - try { - data = fs.readFileSync(path_1.default.resolve(filePath), 'utf-8'); - } - catch (error) { - if (error.code && error.code === 'ENOENT') { - return defaultOptions; - } - else { - throw error; - } +function getOptionalInput(name) { + const value = core.getInput(name); + return value.length > 0 ? value : undefined; +} +function readConfig() { + const fail_on_severity = z + .enum(schemas_1.SEVERITIES) + .default('low') + .parse(getOptionalInput('fail-on-severity')); + const allow_licenses = getOptionalInput('allow-licenses'); + const deny_licenses = getOptionalInput('deny-licenses'); + if (allow_licenses !== undefined && deny_licenses !== undefined) { + throw new Error("Can't specify both allow_licenses and deny_licenses"); } - return schemas_1.ConfigurationOptionsSchema.parse(yaml_1.default.parse(data)); + return { + fail_on_severity, + allow_licenses: allow_licenses === null || allow_licenses === void 0 ? void 0 : allow_licenses.split(',').map(x => x.trim()), + deny_licenses: deny_licenses === null || deny_licenses === void 0 ? 
void 0 : deny_licenses.split(',').map(x => x.trim()) + }; } -exports.readConfigFile = readConfigFile; +exports.readConfig = readConfig; /***/ }), @@ -13829,7 +13826,7 @@ exports.ConfigurationOptionsSchema = z deny_licenses: z.array(z.string()).default([]) }) .partial() - .refine(obj => !(obj.allow_licenses && obj.deny_licenses), "Can't specify both allow_licenses and deny_licenses"); + .refine(obj => !(obj.allow_licenses && obj.deny_licenses), "Your workflow file has both an allow_licenses list and deny_licenses list, but you can only set one or the other."); exports.ChangesSchema = z.array(exports.ChangeSchema); @@ -13953,8298 +13950,6 @@ module.exports = require("util"); "use strict"; module.exports = require("zlib"); -/***/ }), - -/***/ 8109: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var Scalar = __nccwpck_require__(9338); -var resolveBlockMap = __nccwpck_require__(2986); -var resolveBlockSeq = __nccwpck_require__(2289); -var resolveFlowCollection = __nccwpck_require__(45); - -function composeCollection(CN, ctx, token, tagToken, onError) { - let coll; - switch (token.type) { - case 'block-map': { - coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError); - break; - } - case 'block-seq': { - coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError); - break; - } - case 'flow-collection': { - coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError); - break; - } - } - if (!tagToken) - return coll; - const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); - if (!tagName) - return coll; - // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841 - const Coll = coll.constructor; - if (tagName === '!' || tagName === Coll.tagName) { - coll.tag = Coll.tagName; - return coll; - } - const expType = Node.isMap(coll) ? 'map' : 'seq'; - let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName); - if (!tag) { - const kt = ctx.schema.knownTags[tagName]; - if (kt && kt.collection === expType) { - ctx.schema.tags.push(Object.assign({}, kt, { default: false })); - tag = kt; - } - else { - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); - coll.tag = tagName; - return coll; - } - } - const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options); - const node = Node.isNode(res) - ? res - : new Scalar.Scalar(res); - node.range = coll.range; - node.tag = tagName; - if (tag?.format) - node.format = tag.format; - return node; -} - -exports.composeCollection = composeCollection; - - -/***/ }), - -/***/ 5050: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Document = __nccwpck_require__(42); -var composeNode = __nccwpck_require__(8676); -var resolveEnd = __nccwpck_require__(1250); -var resolveProps = __nccwpck_require__(6985); - -function composeDoc(options, directives, { offset, start, value, end }, onError) { - const opts = Object.assign({ _directives: directives }, options); - const doc = new Document.Document(undefined, opts); - const ctx = { - atRoot: true, - directives: doc.directives, - options: doc.options, - schema: doc.schema - }; - const props = resolveProps.resolveProps(start, { - indicator: 'doc-start', - next: value ?? 
end?.[0], - offset, - onError, - startOnNewline: true - }); - if (props.found) { - doc.directives.docStart = true; - if (value && - (value.type === 'block-map' || value.type === 'block-seq') && - !props.hasNewline) - onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); - } - doc.contents = value - ? composeNode.composeNode(ctx, value, props, onError) - : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError); - const contentEnd = doc.contents.range[2]; - const re = resolveEnd.resolveEnd(end, contentEnd, false, onError); - if (re.comment) - doc.comment = re.comment; - doc.range = [offset, contentEnd, re.offset]; - return doc; -} - -exports.composeDoc = composeDoc; - - -/***/ }), - -/***/ 8676: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(5639); -var composeCollection = __nccwpck_require__(8109); -var composeScalar = __nccwpck_require__(4766); -var resolveEnd = __nccwpck_require__(1250); -var utilEmptyScalarPosition = __nccwpck_require__(8781); - -const CN = { composeNode, composeEmptyNode }; -function composeNode(ctx, token, props, onError) { - const { spaceBefore, comment, anchor, tag } = props; - let node; - let isSrcToken = true; - switch (token.type) { - case 'alias': - node = composeAlias(ctx, token, onError); - if (anchor || tag) - onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); - break; - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'block-scalar': - node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - case 'block-map': - case 'block-seq': - case 'flow-collection': - node = composeCollection.composeCollection(CN, ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - default: { - const message = token.type === 'error' - ? 
token.message - : `Unsupported token (type: ${token.type})`; - onError(token, 'UNEXPECTED_TOKEN', message); - node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); - isSrcToken = false; - } - } - if (anchor && node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - if (token.type === 'scalar' && token.source === '') - node.comment = comment; - else - node.commentBefore = comment; - } - // @ts-expect-error Type checking misses meaning of isSrcToken - if (ctx.options.keepSourceTokens && isSrcToken) - node.srcToken = token; - return node; -} -function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) { - const token = { - type: 'scalar', - offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos), - indent: -1, - source: '' - }; - const node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) { - node.anchor = anchor.source.substring(1); - if (node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) - node.comment = comment; - return node; -} -function composeAlias({ options }, { offset, source, end }, onError) { - const alias = new Alias.Alias(source.substring(1)); - if (alias.source === '') - onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); - if (alias.source.endsWith(':')) - onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError); - alias.range = [offset, valueEnd, re.offset]; - if (re.comment) - alias.comment = re.comment; - return alias; -} - -exports.composeEmptyNode = composeEmptyNode; -exports.composeNode = composeNode; - - -/***/ }), - -/***/ 4766: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var Scalar = __nccwpck_require__(9338); -var resolveBlockScalar = __nccwpck_require__(9485); -var resolveFlowScalar = __nccwpck_require__(7578); - -function composeScalar(ctx, token, tagToken, onError) { - const { value, type, comment, range } = token.type === 'block-scalar' - ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError) - : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError); - const tagName = tagToken - ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) - : null; - const tag = tagToken && tagName - ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError) - : token.type === 'scalar' - ? findScalarTagByTest(ctx, value, token, onError) - : ctx.schema[Node.SCALAR]; - let scalar; - try { - const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); - scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res); - } - catch (error) { - const msg = error instanceof Error ? error.message : String(error); - onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg); - scalar = new Scalar.Scalar(value); - } - scalar.range = range; - scalar.source = value; - if (type) - scalar.type = type; - if (tagName) - scalar.tag = tagName; - if (tag.format) - scalar.format = tag.format; - if (comment) - scalar.comment = comment; - return scalar; -} -function findScalarTagByName(schema, value, tagName, tagToken, onError) { - if (tagName === '!') - return schema[Node.SCALAR]; // non-specific tag - const matchWithTest = []; - for (const tag of schema.tags) { - if (!tag.collection && tag.tag === tagName) { - if (tag.default && tag.test) - matchWithTest.push(tag); - else - return tag; - } - } - for (const tag of matchWithTest) - if (tag.test?.test(value)) - return tag; - const kt = schema.knownTags[tagName]; - if (kt && !kt.collection) { - // Ensure that the known tag is available for stringifying, - // but does not get used by default. - schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); - return kt; - } - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); - return schema[Node.SCALAR]; -} -function findScalarTagByTest({ directives, schema }, value, token, onError) { - const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR]; - if (schema.compat) { - const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? - schema[Node.SCALAR]; - if (tag.tag !== compat.tag) { - const ts = directives.tagString(tag.tag); - const cs = directives.tagString(compat.tag); - const msg = `Value may be parsed as either ${ts} or ${cs}`; - onError(token, 'TAG_RESOLVE_FAILED', msg, true); - } - } - return tag; -} - -exports.composeScalar = composeScalar; - - -/***/ }), - -/***/ 9493: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var directives = __nccwpck_require__(5400); -var Document = __nccwpck_require__(42); -var errors = __nccwpck_require__(4236); -var Node = __nccwpck_require__(1399); -var composeDoc = __nccwpck_require__(5050); -var resolveEnd = __nccwpck_require__(1250); - -function getErrorPos(src) { - if (typeof src === 'number') - return [src, src + 1]; - if (Array.isArray(src)) - return src.length === 2 ? src : [src[0], src[1]]; - const { offset, source } = src; - return [offset, offset + (typeof source === 'string' ? source.length : 1)]; -} -function parsePrelude(prelude) { - let comment = ''; - let atComment = false; - let afterEmptyLine = false; - for (let i = 0; i < prelude.length; ++i) { - const source = prelude[i]; - switch (source[0]) { - case '#': - comment += - (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + - (source.substring(1) || ' '); - atComment = true; - afterEmptyLine = false; - break; - case '%': - if (prelude[i + 1]?.[0] !== '#') - i += 1; - atComment = false; - break; - default: - // This may be wrong after doc-end, but in that case it doesn't matter - if (!atComment) - afterEmptyLine = true; - atComment = false; - } - } - return { comment, afterEmptyLine }; -} -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... 
- * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -class Composer { - constructor(options = {}) { - this.doc = null; - this.atDirectives = false; - this.prelude = []; - this.errors = []; - this.warnings = []; - this.onError = (source, code, message, warning) => { - const pos = getErrorPos(source); - if (warning) - this.warnings.push(new errors.YAMLWarning(pos, code, message)); - else - this.errors.push(new errors.YAMLParseError(pos, code, message)); - }; - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - this.directives = new directives.Directives({ version: options.version || '1.2' }); - this.options = options; - } - decorate(doc, afterDoc) { - const { comment, afterEmptyLine } = parsePrelude(this.prelude); - //console.log({ dc: doc.comment, prelude, comment }) - if (comment) { - const dc = doc.contents; - if (afterDoc) { - doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; - } - else if (afterEmptyLine || doc.directives.docStart || !dc) { - doc.commentBefore = comment; - } - else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) { - let it = dc.items[0]; - if (Node.isPair(it)) - it = it.key; - const cb = it.commentBefore; - it.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - else { - const cb = dc.commentBefore; - dc.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - } - if (afterDoc) { - Array.prototype.push.apply(doc.errors, this.errors); - Array.prototype.push.apply(doc.warnings, this.warnings); - } - else { - doc.errors = this.errors; - doc.warnings = this.warnings; - } - this.prelude = []; - this.errors = []; - this.warnings = []; - } - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. - */ - streamInfo() { - return { - comment: parsePrelude(this.prelude).comment, - directives: this.directives, - errors: this.errors, - warnings: this.warnings - }; - } - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *compose(tokens, forceDoc = false, endOffset = -1) { - for (const token of tokens) - yield* this.next(token); - yield* this.end(forceDoc, endOffset); - } - /** Advance the composer by one CST token. */ - *next(token) { - if (process.env.LOG_STREAM) - console.dir(token, { depth: null }); - switch (token.type) { - case 'directive': - this.directives.add(token.source, (offset, message, warning) => { - const pos = getErrorPos(token); - pos[0] += offset; - this.onError(pos, 'BAD_DIRECTIVE', message, warning); - }); - this.prelude.push(token.source); - this.atDirectives = true; - break; - case 'document': { - const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError); - if (this.atDirectives && !doc.directives.docStart) - this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); - this.decorate(doc, false); - if (this.doc) - yield this.doc; - this.doc = doc; - this.atDirectives = false; - break; - } - case 'byte-order-mark': - case 'space': - break; - case 'comment': - case 'newline': - this.prelude.push(token.source); - break; - case 'error': { - const msg = token.source - ? 
`${token.message}: ${JSON.stringify(token.source)}` - : token.message; - const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); - if (this.atDirectives || !this.doc) - this.errors.push(error); - else - this.doc.errors.push(error); - break; - } - case 'doc-end': { - if (!this.doc) { - const msg = 'Unexpected doc-end without preceding document'; - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); - break; - } - this.doc.directives.docEnd = true; - const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); - this.decorate(this.doc, true); - if (end.comment) { - const dc = this.doc.comment; - this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; - } - this.doc.range[2] = end.offset; - break; - } - default: - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); - } - } - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *end(forceDoc = false, endOffset = -1) { - if (this.doc) { - this.decorate(this.doc, true); - yield this.doc; - this.doc = null; - } - else if (forceDoc) { - const opts = Object.assign({ _directives: this.directives }, this.options); - const doc = new Document.Document(undefined, opts); - if (this.atDirectives) - this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); - doc.range = [0, endOffset, endOffset]; - this.decorate(doc, false); - yield doc; - } - } -} - -exports.Composer = Composer; - - -/***/ }), - -/***/ 2986: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Pair = __nccwpck_require__(246); -var YAMLMap = __nccwpck_require__(6011); -var resolveProps = __nccwpck_require__(6985); -var utilContainsNewline = __nccwpck_require__(976); -var utilFlowIndentCheck = __nccwpck_require__(3669); -var utilMapIncludes = __nccwpck_require__(6899); - -const startColMsg = 'All mapping items must start at the same column'; -function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) { - const map = new YAMLMap.YAMLMap(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bm.offset; - for (const collItem of bm.items) { - const { start, key, sep, value } = collItem; - // key properties - const keyProps = resolveProps.resolveProps(start, { - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: true - }); - const implicitKey = !keyProps.found; - if (implicitKey) { - if (key) { - if (key.type === 'block-seq') - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); - else if ('indent' in key && key.indent !== bm.indent) - onError(offset, 'BAD_INDENT', startColMsg); - } - if (!keyProps.anchor && !keyProps.tag && !sep) { - // TODO: assert being at last item? - if (keyProps.comment) { - if (map.comment) - map.comment += '\n' + keyProps.comment; - else - map.comment = keyProps.comment; - } - continue; - } - if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) { - onError(key ?? 
start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); - } - } - else if (keyProps.found?.indent !== bm.indent) { - onError(offset, 'BAD_INDENT', startColMsg); - } - // key value - const keyStart = keyProps.end; - const keyNode = key - ? composeNode(ctx, key, keyProps, onError) - : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError); - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? [], { - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: !key || key.type === 'block-scalar' - }); - offset = valueProps.end; - if (valueProps.found) { - if (implicitKey) { - if (value?.type === 'block-map' && !valueProps.hasNewline) - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); - if (ctx.options.strict && - keyProps.start < valueProps.found.offset - 1024) - onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError); - offset = valueNode.range[2]; - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - else { - // key with no value - if (implicitKey) - onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); - if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - } - map.range = [bm.offset, offset, offset]; - return map; -} - -exports.resolveBlockMap = resolveBlockMap; - - -/***/ }), - -/***/ 9485: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(9338); - -function resolveBlockScalar(scalar, strict, onError) { - const start = scalar.offset; - const header = parseBlockScalarHeader(scalar, strict, onError); - if (!header) - return { value: '', type: null, comment: '', range: [start, start, start] }; - const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL; - const lines = scalar.source ? splitLines(scalar.source) : []; - // determine the end of content & start of chomping - let chompStart = lines.length; - for (let i = lines.length - 1; i >= 0; --i) { - const content = lines[i][1]; - if (content === '' || content === '\r') - chompStart = i; - else - break; - } - // shortcut for empty contents - if (chompStart === 0) { - const value = header.chomp === '+' && lines.length > 0 - ? 
'\n'.repeat(Math.max(1, lines.length - 1)) - : ''; - let end = start + header.length; - if (scalar.source) - end += scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; - } - // find the indentation level to trim from start - let trimIndent = scalar.indent + header.indent; - let offset = scalar.offset + header.length; - let contentStart = 0; - for (let i = 0; i < chompStart; ++i) { - const [indent, content] = lines[i]; - if (content === '' || content === '\r') { - if (header.indent === 0 && indent.length > trimIndent) - trimIndent = indent.length; - } - else { - if (indent.length < trimIndent) { - const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; - onError(offset + indent.length, 'MISSING_CHAR', message); - } - if (header.indent === 0) - trimIndent = indent.length; - contentStart = i; - break; - } - offset += indent.length + content.length + 1; - } - // include trailing more-indented empty lines in content - for (let i = lines.length - 1; i >= chompStart; --i) { - if (lines[i][0].length > trimIndent) - chompStart = i + 1; - } - let value = ''; - let sep = ''; - let prevMoreIndented = false; - // leading whitespace is kept intact - for (let i = 0; i < contentStart; ++i) - value += lines[i][0].slice(trimIndent) + '\n'; - for (let i = contentStart; i < chompStart; ++i) { - let [indent, content] = lines[i]; - offset += indent.length + content.length + 1; - const crlf = content[content.length - 1] === '\r'; - if (crlf) - content = content.slice(0, -1); - /* istanbul ignore if already caught in lexer */ - if (content && indent.length < trimIndent) { - const src = header.indent - ? 'explicit indentation indicator' - : 'first line'; - const message = `Block scalar lines must not be less indented than their ${src}`; - onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); - indent = ''; - } - if (type === Scalar.Scalar.BLOCK_LITERAL) { - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - } - else if (indent.length > trimIndent || content[0] === '\t') { - // more-indented content within a folded block - if (sep === ' ') - sep = '\n'; - else if (!prevMoreIndented && sep === '\n') - sep = '\n\n'; - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - prevMoreIndented = true; - } - else if (content === '') { - // empty line - if (sep === '\n') - value += '\n'; - else - sep = '\n'; - } - else { - value += sep + content; - sep = ' '; - prevMoreIndented = false; - } - } - switch (header.chomp) { - case '-': - break; - case '+': - for (let i = chompStart; i < lines.length; ++i) - value += '\n' + lines[i][0].slice(trimIndent); - if (value[value.length - 1] !== '\n') - value += '\n'; - break; - default: - value += '\n'; - } - const end = start + header.length + scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; -} -function parseBlockScalarHeader({ offset, props }, strict, onError) { - /* istanbul ignore if should not happen */ - if (props[0].type !== 'block-scalar-header') { - onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); - return null; - } - const { source } = props[0]; - const mode = source[0]; - let indent = 0; - let chomp = ''; - let error = -1; - for (let i = 1; i < source.length; ++i) { - const ch = source[i]; - if (!chomp && (ch === '-' || ch === '+')) - chomp = ch; - else { - const n = Number(ch); - if (!indent && n) - indent = n; - else if (error === -1) - error = offset + i; - } - } - if (error !== -1) - onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); - let hasSpace = false; - let comment = ''; - let length = source.length; - for (let i = 1; i < props.length; ++i) { - const token = props[i]; - switch (token.type) { - case 'space': - hasSpace = true; - // fallthrough - case 'newline': - length += token.source.length; - break; - case 'comment': - if (strict && !hasSpace) { - const message = 'Comments must be separated from other tokens by white space characters'; - onError(token, 'MISSING_CHAR', message); - } - length += token.source.length; - comment = token.source.substring(1); - break; - case 'error': - onError(token, 'UNEXPECTED_TOKEN', token.message); - length += token.source.length; - break; - /* istanbul ignore next should not happen */ - default: { - const message = `Unexpected token in block scalar header: ${token.type}`; - onError(token, 'UNEXPECTED_TOKEN', message); - const ts = token.source; - if (ts && typeof ts === 'string') - length += ts.length; - } - } - } - return { mode, indent, chomp, comment, length }; -} -/** @returns Array of lines split up as `[indent, content]` */ -function splitLines(source) { - const split = source.split(/\n( *)/); - const first = split[0]; - const m = first.match(/^( *)/); - const line0 = m?.[1] - ? 
[m[1], first.slice(m[1].length)] - : ['', first]; - const lines = [line0]; - for (let i = 1; i < split.length; i += 2) - lines.push([split[i], split[i + 1]]); - return lines; -} - -exports.resolveBlockScalar = resolveBlockScalar; - - -/***/ }), - -/***/ 2289: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var YAMLSeq = __nccwpck_require__(5161); -var resolveProps = __nccwpck_require__(6985); -var utilFlowIndentCheck = __nccwpck_require__(3669); - -function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) { - const seq = new YAMLSeq.YAMLSeq(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bs.offset; - for (const { start, value } of bs.items) { - const props = resolveProps.resolveProps(start, { - indicator: 'seq-item-ind', - next: value, - offset, - onError, - startOnNewline: true - }); - offset = props.end; - if (!props.found) { - if (props.anchor || props.tag || value) { - if (value && value.type === 'block-seq') - onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column'); - else - onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); - } - else { - // TODO: assert being at last item? - if (props.comment) - seq.comment = props.comment; - continue; - } - } - const node = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, offset, start, null, props, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError); - offset = node.range[2]; - seq.items.push(node); - } - seq.range = [bs.offset, offset, offset]; - return seq; -} - -exports.resolveBlockSeq = resolveBlockSeq; - - -/***/ }), - -/***/ 1250: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function resolveEnd(end, offset, reqSpace, onError) { - let comment = ''; - if (end) { - let hasSpace = false; - let sep = ''; - for (const token of end) { - const { source, type } = token; - switch (type) { - case 'space': - hasSpace = true; - break; - case 'comment': { - if (reqSpace && !hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += sep + cb; - sep = ''; - break; - } - case 'newline': - if (comment) - sep += source; - hasSpace = true; - break; - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); - } - offset += source.length; - } - } - return { comment, offset }; -} - -exports.resolveEnd = resolveEnd; - - -/***/ }), - -/***/ 45: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var Pair = __nccwpck_require__(246); -var YAMLMap = __nccwpck_require__(6011); -var YAMLSeq = __nccwpck_require__(5161); -var resolveEnd = __nccwpck_require__(1250); -var resolveProps = __nccwpck_require__(6985); -var utilContainsNewline = __nccwpck_require__(976); -var utilMapIncludes = __nccwpck_require__(6899); - -const blockMsg = 'Block collections are not allowed within flow collections'; -const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); -function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) { - const isMap = fc.start.source === '{'; - const fcName = isMap ? 'flow map' : 'flow sequence'; - const coll = isMap - ? 
new YAMLMap.YAMLMap(ctx.schema) - : new YAMLSeq.YAMLSeq(ctx.schema); - coll.flow = true; - const atRoot = ctx.atRoot; - if (atRoot) - ctx.atRoot = false; - let offset = fc.offset + fc.start.source.length; - for (let i = 0; i < fc.items.length; ++i) { - const collItem = fc.items[i]; - const { start, key, sep, value } = collItem; - const props = resolveProps.resolveProps(start, { - flow: fcName, - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: false - }); - if (!props.found) { - if (!props.anchor && !props.tag && !sep && !value) { - if (i === 0 && props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - else if (i < fc.items.length - 1) - onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); - if (props.comment) { - if (coll.comment) - coll.comment += '\n' + props.comment; - else - coll.comment = props.comment; - } - offset = props.end; - continue; - } - if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key)) - onError(key, // checked by containsNewline() - 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - } - if (i === 0) { - if (props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - } - else { - if (!props.comma) - onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); - if (props.comment) { - let prevItemComment = ''; - loop: for (const st of start) { - switch (st.type) { - case 'comma': - case 'space': - break; - case 'comment': - prevItemComment = st.source.substring(1); - break loop; - default: - break loop; - } - } - if (prevItemComment) { - let prev = coll.items[coll.items.length - 1]; - if (Node.isPair(prev)) - prev = prev.value ?? prev.key; - if (prev.comment) - prev.comment += '\n' + prevItemComment; - else - prev.comment = prevItemComment; - props.comment = props.comment.substring(prevItemComment.length + 1); - } - } - } - if (!isMap && !sep && !props.found) { - // item is a value in a seq - // → key & sep are empty, start does not include ? or : - const valueNode = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, sep, null, props, onError); - coll.items.push(valueNode); - offset = valueNode.range[2]; - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else { - // item is a key+value pair - // key value - const keyStart = props.end; - const keyNode = key - ? composeNode(ctx, key, props, onError) - : composeEmptyNode(ctx, keyStart, start, null, props, onError); - if (isBlock(key)) - onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? 
[], { - flow: fcName, - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: false - }); - if (valueProps.found) { - if (!isMap && !props.found && ctx.options.strict) { - if (sep) - for (const st of sep) { - if (st === valueProps.found) - break; - if (st.type === 'newline') { - onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - break; - } - } - if (props.start < valueProps.found.offset - 1024) - onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); - } - } - else if (value) { - if ('source' in value && value.source && value.source[0] === ':') - onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); - else - onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : valueProps.found - ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) - : null; - if (valueNode) { - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - if (isMap) { - const map = coll; - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - map.items.push(pair); - } - else { - const map = new YAMLMap.YAMLMap(ctx.schema); - map.flow = true; - map.items.push(pair); - coll.items.push(map); - } - offset = valueNode ? valueNode.range[2] : valueProps.end; - } - } - const expectedEnd = isMap ? '}' : ']'; - const [ce, ...ee] = fc.end; - let cePos = offset; - if (ce && ce.source === expectedEnd) - cePos = ce.offset + ce.source.length; - else { - const name = fcName[0].toUpperCase() + fcName.substring(1); - const msg = atRoot - ? `${name} must end with a ${expectedEnd}` - : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; - onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg); - if (ce && ce.source.length !== 1) - ee.unshift(ce); - } - if (ee.length > 0) { - const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError); - if (end.comment) { - if (coll.comment) - coll.comment += '\n' + end.comment; - else - coll.comment = end.comment; - } - coll.range = [fc.offset, cePos, end.offset]; - } - else { - coll.range = [fc.offset, cePos, cePos]; - } - return coll; -} - -exports.resolveFlowCollection = resolveFlowCollection; - - -/***/ }), - -/***/ 7578: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(9338); -var resolveEnd = __nccwpck_require__(1250); - -function resolveFlowScalar(scalar, strict, onError) { - const { offset, type, source, end } = scalar; - let _type; - let value; - const _onError = (rel, code, msg) => onError(offset + rel, code, msg); - switch (type) { - case 'scalar': - _type = Scalar.Scalar.PLAIN; - value = plainValue(source, _onError); - break; - case 'single-quoted-scalar': - _type = Scalar.Scalar.QUOTE_SINGLE; - value = singleQuotedValue(source, _onError); - break; - case 'double-quoted-scalar': - _type = Scalar.Scalar.QUOTE_DOUBLE; - value = doubleQuotedValue(source, _onError); - break; - /* istanbul ignore next should not happen */ - default: - onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); - return { - value: '', - type: null, - comment: '', - range: [offset, offset + source.length, offset + source.length] - }; - } - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError); - return { - value, - type: _type, - comment: re.comment, - range: [offset, valueEnd, re.offset] - }; -} -function plainValue(source, onError) { - let badChar = ''; - switch (source[0]) { - /* istanbul ignore next should not happen */ - case '\t': - badChar = 'a tab character'; - break; - case ',': - badChar = 'flow indicator character ,'; - break; - case '%': - badChar = 'directive indicator character %'; - break; - case '|': - case '>': { - badChar = `block scalar indicator ${source[0]}`; - break; - } - case '@': - case '`': { - badChar = `reserved character ${source[0]}`; - break; - } - } - if (badChar) - onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); - return foldLines(source); -} -function singleQuotedValue(source, onError) { - if (source[source.length - 1] !== "'" || source.length === 1) - onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); - return foldLines(source.slice(1, -1)).replace(/''/g, "'"); -} -function foldLines(source) { - /** - * The negative lookbehind here and in the `re` RegExp is to - * prevent causing a polynomial search time in certain cases. - * - * The try-catch is for Safari, which doesn't support this yet: - * https://caniuse.com/js-regexp-lookbehind - */ - let first, line; - try { - first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; - } - else { - res += ch; - } - } - if (source[source.length - 1] !== '"' || source.length === 1) - onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); - return res; -} -/** - * Fold a single newline into a space, multiple newlines to N - 1 newlines. 
- * Presumes `source[offset] === '\n'` - */ -function foldNewline(source, offset) { - let fold = ''; - let ch = source[offset + 1]; - while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { - if (ch === '\r' && source[offset + 2] !== '\n') - break; - if (ch === '\n') - fold += '\n'; - offset += 1; - ch = source[offset + 1]; - } - if (!fold) - fold = ' '; - return { fold, offset }; -} -const escapeCodes = { - '0': '\0', - a: '\x07', - b: '\b', - e: '\x1b', - f: '\f', - n: '\n', - r: '\r', - t: '\t', - v: '\v', - N: '\u0085', - _: '\u00a0', - L: '\u2028', - P: '\u2029', - ' ': ' ', - '"': '"', - '/': '/', - '\\': '\\', - '\t': '\t' -}; -function parseCharCode(source, offset, length, onError) { - const cc = source.substr(offset, length); - const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); - const code = ok ? parseInt(cc, 16) : NaN; - if (isNaN(code)) { - const raw = source.substr(offset - 2, length + 2); - onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); - return raw; - } - return String.fromCodePoint(code); -} - -exports.resolveFlowScalar = resolveFlowScalar; - - -/***/ }), - -/***/ 6985: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) { - let spaceBefore = false; - let atNewline = startOnNewline; - let hasSpace = startOnNewline; - let comment = ''; - let commentSep = ''; - let hasNewline = false; - let hasNewlineAfterProp = false; - let reqSpace = false; - let anchor = null; - let tag = null; - let comma = null; - let found = null; - let start = null; - for (const token of tokens) { - if (reqSpace) { - if (token.type !== 'space' && - token.type !== 'newline' && - token.type !== 'comma') - onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - reqSpace = false; - } - switch (token.type) { - case 'space': - // At the doc level, tabs at line start may be parsed - // as leading white space rather than indentation. - // In a flow collection, only the parser handles indent. 
- if (!flow && - atNewline && - indicator !== 'doc-start' && - token.source[0] === '\t') - onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - hasSpace = true; - break; - case 'comment': { - if (!hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = token.source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += commentSep + cb; - commentSep = ''; - atNewline = false; - break; - } - case 'newline': - if (atNewline) { - if (comment) - comment += token.source; - else - spaceBefore = true; - } - else - commentSep += token.source; - atNewline = true; - hasNewline = true; - if (anchor || tag) - hasNewlineAfterProp = true; - hasSpace = true; - break; - case 'anchor': - if (anchor) - onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); - if (token.source.endsWith(':')) - onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); - anchor = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - case 'tag': { - if (tag) - onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); - tag = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - } - case indicator: - // Could here handle preceding comments differently - if (anchor || tag) - onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); - if (found) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); - found = token; - atNewline = false; - hasSpace = false; - break; - case 'comma': - if (flow) { - if (comma) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); - comma = token; - atNewline = false; - hasSpace = false; - break; - } - // else fallthrough - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); - atNewline = false; - hasSpace = false; - } - } - const last = tokens[tokens.length - 1]; - const end = last ? last.offset + last.source.length : offset; - if (reqSpace && - next && - next.type !== 'space' && - next.type !== 'newline' && - next.type !== 'comma' && - (next.type !== 'scalar' || next.source !== '')) - onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - return { - comma, - found, - spaceBefore, - comment, - hasNewline, - hasNewlineAfterProp, - anchor, - tag, - end, - start: start ?? 
end - }; -} - -exports.resolveProps = resolveProps; - - -/***/ }), - -/***/ 976: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function containsNewline(key) { - if (!key) - return null; - switch (key.type) { - case 'alias': - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - if (key.source.includes('\n')) - return true; - if (key.end) - for (const st of key.end) - if (st.type === 'newline') - return true; - return false; - case 'flow-collection': - for (const it of key.items) { - for (const st of it.start) - if (st.type === 'newline') - return true; - if (it.sep) - for (const st of it.sep) - if (st.type === 'newline') - return true; - if (containsNewline(it.key) || containsNewline(it.value)) - return true; - } - return false; - default: - return true; - } -} - -exports.containsNewline = containsNewline; - - -/***/ }), - -/***/ 8781: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function emptyScalarPosition(offset, before, pos) { - if (before) { - if (pos === null) - pos = before.length; - for (let i = pos - 1; i >= 0; --i) { - let st = before[i]; - switch (st.type) { - case 'space': - case 'comment': - case 'newline': - offset -= st.source.length; - continue; - } - // Technically, an empty scalar is immediately after the last non-empty - // node, but it's more useful to place it after any whitespace. - st = before[++i]; - while (st?.type === 'space') { - offset += st.source.length; - st = before[++i]; - } - break; - } - } - return offset; -} - -exports.emptyScalarPosition = emptyScalarPosition; - - -/***/ }), - -/***/ 3669: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var utilContainsNewline = __nccwpck_require__(976); - -function flowIndentCheck(indent, fc, onError) { - if (fc?.type === 'flow-collection') { - const end = fc.end[0]; - if (end.indent === indent && - (end.source === ']' || end.source === '}') && - utilContainsNewline.containsNewline(fc)) { - const msg = 'Flow end indicator should be more indented than parent'; - onError(end, 'BAD_INDENT', msg, true); - } - } -} - -exports.flowIndentCheck = flowIndentCheck; - - -/***/ }), - -/***/ 6899: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); - -function mapIncludes(ctx, items, search) { - const { uniqueKeys } = ctx.options; - if (uniqueKeys === false) - return false; - const isEqual = typeof uniqueKeys === 'function' - ? 
uniqueKeys - : (a, b) => a === b || - (Node.isScalar(a) && - Node.isScalar(b) && - a.value === b.value && - !(a.value === '<<' && ctx.schema.merge)); - return items.some(pair => isEqual(pair.key, search)); -} - -exports.mapIncludes = mapIncludes; - - -/***/ }), - -/***/ 42: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(5639); -var Collection = __nccwpck_require__(3466); -var Node = __nccwpck_require__(1399); -var Pair = __nccwpck_require__(246); -var toJS = __nccwpck_require__(2463); -var Schema = __nccwpck_require__(6831); -var stringify = __nccwpck_require__(8409); -var stringifyDocument = __nccwpck_require__(5225); -var anchors = __nccwpck_require__(8459); -var applyReviver = __nccwpck_require__(3412); -var createNode = __nccwpck_require__(9652); -var directives = __nccwpck_require__(5400); - -class Document { - constructor(value, replacer, options) { - /** A comment before this Document */ - this.commentBefore = null; - /** A comment immediately after this Document */ - this.comment = null; - /** Errors encountered during parsing. */ - this.errors = []; - /** Warnings encountered during parsing. */ - this.warnings = []; - Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC }); - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const opt = Object.assign({ - intAsBigInt: false, - keepSourceTokens: false, - logLevel: 'warn', - prettyErrors: true, - strict: true, - uniqueKeys: true, - version: '1.2' - }, options); - this.options = opt; - let { version } = opt; - if (options?._directives) { - this.directives = options._directives.atDocument(); - if (this.directives.yaml.explicit) - version = this.directives.yaml.version; - } - else - this.directives = new directives.Directives({ version }); - this.setSchema(version, options); - if (value === undefined) - this.contents = null; - else { - this.contents = this.createNode(value, _replacer, options); - } - } - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone() { - const copy = Object.create(Document.prototype, { - [Node.NODE_TYPE]: { value: Node.DOC } - }); - copy.commentBefore = this.commentBefore; - copy.comment = this.comment; - copy.errors = this.errors.slice(); - copy.warnings = this.warnings.slice(); - copy.options = Object.assign({}, this.options); - if (this.directives) - copy.directives = this.directives.clone(); - copy.schema = this.schema.clone(); - copy.contents = Node.isNode(this.contents) - ? this.contents.clone(copy.schema) - : this.contents; - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** Adds a value to the document. */ - add(value) { - if (assertCollection(this.contents)) - this.contents.add(value); - } - /** Adds a value to the document. */ - addIn(path, value) { - if (assertCollection(this.contents)) - this.contents.addIn(path, value); - } - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. - * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. 
- * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node, name) { - if (!node.anchor) { - const prev = anchors.anchorNames(this); - node.anchor = - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name; - } - return new Alias.Alias(node.anchor); - } - createNode(value, replacer, options) { - let _replacer = undefined; - if (typeof replacer === 'function') { - value = replacer.call({ '': value }, '', value); - _replacer = replacer; - } - else if (Array.isArray(replacer)) { - const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; - const asStr = replacer.filter(keyToStr).map(String); - if (asStr.length > 0) - replacer = replacer.concat(asStr); - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; - const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - anchorPrefix || 'a'); - const ctx = { - aliasDuplicateObjects: aliasDuplicateObjects ?? true, - keepUndefined: keepUndefined ?? false, - onAnchor, - onTagObj, - replacer: _replacer, - schema: this.schema, - sourceObjects - }; - const node = createNode.createNode(value, tag, ctx); - if (flow && Node.isCollection(node)) - node.flow = true; - setAnchors(); - return node; - } - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key, value, options = {}) { - const k = this.createNode(key, null, options); - const v = this.createNode(value, null, options); - return new Pair.Pair(k, v); - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key) { - return assertCollection(this.contents) ? this.contents.delete(key) : false; - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - if (Collection.isEmptyPath(path)) { - if (this.contents == null) - return false; - this.contents = null; - return true; - } - return assertCollection(this.contents) - ? this.contents.deleteIn(path) - : false; - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key, keepScalar) { - return Node.isCollection(this.contents) - ? this.contents.get(key, keepScalar) - : undefined; - } - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - if (Collection.isEmptyPath(path)) - return !keepScalar && Node.isScalar(this.contents) - ? this.contents.value - : this.contents; - return Node.isCollection(this.contents) - ? this.contents.getIn(path, keepScalar) - : undefined; - } - /** - * Checks if the document includes a value with the key `key`. - */ - has(key) { - return Node.isCollection(this.contents) ? this.contents.has(key) : false; - } - /** - * Checks if the document includes a value at `path`. 
- */ - hasIn(path) { - if (Collection.isEmptyPath(path)) - return this.contents !== undefined; - return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false; - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key, value) { - if (this.contents == null) { - this.contents = Collection.collectionFromPath(this.schema, [key], value); - } - else if (assertCollection(this.contents)) { - this.contents.set(key, value); - } - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - if (Collection.isEmptyPath(path)) - this.contents = value; - else if (this.contents == null) { - this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value); - } - else if (assertCollection(this.contents)) { - this.contents.setIn(path, value); - } - } - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version, options = {}) { - if (typeof version === 'number') - version = String(version); - let opt; - switch (version) { - case '1.1': - if (this.directives) - this.directives.yaml.version = '1.1'; - else - this.directives = new directives.Directives({ version: '1.1' }); - opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }; - break; - case '1.2': - case 'next': - if (this.directives) - this.directives.yaml.version = version; - else - this.directives = new directives.Directives({ version }); - opt = { merge: false, resolveKnownTags: true, schema: 'core' }; - break; - case null: - if (this.directives) - delete this.directives; - opt = null; - break; - default: { - const sv = JSON.stringify(version); - throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); - } - } - // Not using `instanceof Schema` to allow for duck typing - if (options.schema instanceof Object) - this.schema = options.schema; - else if (opt) - this.schema = new Schema.Schema(Object.assign(opt, options)); - else - throw new Error(`With a null YAML version, the { schema: Schema } option is required`); - } - // json & jsonArg are only used from toJSON() - toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - const ctx = { - anchors: new Map(), - doc: this, - keep: !json, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100, - stringify: stringify.stringify - }; - const res = toJS.toJS(this.contents, jsonArg ?? '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? applyReviver.applyReviver(reviver, { '': res }, '', res) - : res; - } - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg, onAnchor) { - return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); - } - /** A YAML representation of the document. 
*/ - toString(options = {}) { - if (this.errors.length > 0) - throw new Error('Document with errors cannot be stringified'); - if ('indent' in options && - (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { - const s = JSON.stringify(options.indent); - throw new Error(`"indent" option must be a positive integer, not ${s}`); - } - return stringifyDocument.stringifyDocument(this, options); - } -} -function assertCollection(contents) { - if (Node.isCollection(contents)) - return true; - throw new Error('Expected a YAML collection as document contents'); -} - -exports.Document = Document; - - -/***/ }), - -/***/ 8459: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var visit = __nccwpck_require__(6796); - -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. - */ -function anchorIsValid(anchor) { - if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { - const sa = JSON.stringify(anchor); - const msg = `Anchor must not contain whitespace or control characters: ${sa}`; - throw new Error(msg); - } - return true; -} -function anchorNames(root) { - const anchors = new Set(); - visit.visit(root, { - Value(_key, node) { - if (node.anchor) - anchors.add(node.anchor); - } - }); - return anchors; -} -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ -function findNewAnchor(prefix, exclude) { - for (let i = 1; true; ++i) { - const name = `${prefix}${i}`; - if (!exclude.has(name)) - return name; - } -} -function createNodeAnchors(doc, prefix) { - const aliasObjects = []; - const sourceObjects = new Map(); - let prevAnchors = null; - return { - onAnchor: (source) => { - aliasObjects.push(source); - if (!prevAnchors) - prevAnchors = anchorNames(doc); - const anchor = findNewAnchor(prefix, prevAnchors); - prevAnchors.add(anchor); - return anchor; - }, - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. - */ - setAnchors: () => { - for (const source of aliasObjects) { - const ref = sourceObjects.get(source); - if (typeof ref === 'object' && - ref.anchor && - (Node.isScalar(ref.node) || Node.isCollection(ref.node))) { - ref.node.anchor = ref.anchor; - } - else { - const error = new Error('Failed to resolve repeated object (this should not happen)'); - error.source = source; - throw error; - } - } - }, - sourceObjects - }; -} - -exports.anchorIsValid = anchorIsValid; -exports.anchorNames = anchorNames; -exports.createNodeAnchors = createNodeAnchors; -exports.findNewAnchor = findNewAnchor; - - -/***/ }), - -/***/ 3412: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. 
- */ -function applyReviver(reviver, obj, key, val) { - if (val && typeof val === 'object') { - if (Array.isArray(val)) { - for (let i = 0, len = val.length; i < len; ++i) { - const v0 = val[i]; - const v1 = applyReviver(reviver, val, String(i), v0); - if (v1 === undefined) - delete val[i]; - else if (v1 !== v0) - val[i] = v1; - } - } - else if (val instanceof Map) { - for (const k of Array.from(val.keys())) { - const v0 = val.get(k); - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - val.delete(k); - else if (v1 !== v0) - val.set(k, v1); - } - } - else if (val instanceof Set) { - for (const v0 of Array.from(val)) { - const v1 = applyReviver(reviver, val, v0, v0); - if (v1 === undefined) - val.delete(v0); - else if (v1 !== v0) { - val.delete(v0); - val.add(v1); - } - } - } - else { - for (const [k, v0] of Object.entries(val)) { - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - delete val[k]; - else if (v1 !== v0) - val[k] = v1; - } - } - } - return reviver.call(obj, key, val); -} - -exports.applyReviver = applyReviver; - - -/***/ }), - -/***/ 9652: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Alias = __nccwpck_require__(5639); -var Node = __nccwpck_require__(1399); -var Scalar = __nccwpck_require__(9338); - -const defaultTagPrefix = 'tag:yaml.org,2002:'; -function findTagObject(value, tagName, tags) { - if (tagName) { - const match = tags.filter(t => t.tag === tagName); - const tagObj = match.find(t => !t.format) ?? match[0]; - if (!tagObj) - throw new Error(`Tag ${tagName} not found`); - return tagObj; - } - return tags.find(t => t.identify?.(value) && !t.format); -} -function createNode(value, tagName, ctx) { - if (Node.isDocument(value)) - value = value.contents; - if (Node.isNode(value)) - return value; - if (Node.isPair(value)) { - const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx); - map.items.push(value); - return map; - } - if (value instanceof String || - value instanceof Number || - value instanceof Boolean || - (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere - ) { - // https://tc39.es/ecma262/#sec-serializejsonproperty - value = value.valueOf(); - } - const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; - // Detect duplicate references to the same object & use Alias nodes for all - // after first. The `ref` wrapper allows for circular references to resolve. - let ref = undefined; - if (aliasDuplicateObjects && value && typeof value === 'object') { - ref = sourceObjects.get(value); - if (ref) { - if (!ref.anchor) - ref.anchor = onAnchor(value); - return new Alias.Alias(ref.anchor); - } - else { - ref = { anchor: null, node: null }; - sourceObjects.set(value, ref); - } - } - if (tagName?.startsWith('!!')) - tagName = defaultTagPrefix + tagName.slice(2); - let tagObj = findTagObject(value, tagName, schema.tags); - if (!tagObj) { - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - value = value.toJSON(); - } - if (!value || typeof value !== 'object') { - const node = new Scalar.Scalar(value); - if (ref) - ref.node = node; - return node; - } - tagObj = - value instanceof Map - ? schema[Node.MAP] - : Symbol.iterator in Object(value) - ? schema[Node.SEQ] - : schema[Node.MAP]; - } - if (onTagObj) { - onTagObj(tagObj); - delete ctx.onTagObj; - } - const node = tagObj?.createNode - ? 
tagObj.createNode(ctx.schema, value, ctx) - : new Scalar.Scalar(value); - if (tagName) - node.tag = tagName; - if (ref) - ref.node = node; - return node; -} - -exports.createNode = createNode; - - -/***/ }), - -/***/ 5400: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var visit = __nccwpck_require__(6796); - -const escapeChars = { - '!': '%21', - ',': '%2C', - '[': '%5B', - ']': '%5D', - '{': '%7B', - '}': '%7D' -}; -const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); -class Directives { - constructor(yaml, tags) { - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - this.docStart = null; - /** The doc-end marker `...`. */ - this.docEnd = false; - this.yaml = Object.assign({}, Directives.defaultYaml, yaml); - this.tags = Object.assign({}, Directives.defaultTags, tags); - } - clone() { - const copy = new Directives(this.yaml, this.tags); - copy.docStart = this.docStart; - return copy; - } - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. - */ - atDocument() { - const res = new Directives(this.yaml, this.tags); - switch (this.yaml.version) { - case '1.1': - this.atNextDocument = true; - break; - case '1.2': - this.atNextDocument = false; - this.yaml = { - explicit: Directives.defaultYaml.explicit, - version: '1.2' - }; - this.tags = Object.assign({}, Directives.defaultTags); - break; - } - return res; - } - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line, onError) { - if (this.atNextDocument) { - this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; - this.tags = Object.assign({}, Directives.defaultTags); - this.atNextDocument = false; - } - const parts = line.trim().split(/[ \t]+/); - const name = parts.shift(); - switch (name) { - case '%TAG': { - if (parts.length !== 2) { - onError(0, '%TAG directive should contain exactly two parts'); - if (parts.length < 2) - return false; - } - const [handle, prefix] = parts; - this.tags[handle] = prefix; - return true; - } - case '%YAML': { - this.yaml.explicit = true; - if (parts.length !== 1) { - onError(0, '%YAML directive should contain exactly one part'); - return false; - } - const [version] = parts; - if (version === '1.1' || version === '1.2') { - this.yaml.version = version; - return true; - } - else { - const isValid = /^\d+\.\d+$/.test(version); - onError(6, `Unsupported YAML version ${version}`, isValid); - return false; - } - } - default: - onError(0, `Unknown directive ${name}`, true); - return false; - } - } - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. - */ - tagName(source, onError) { - if (source === '!') - return '!'; // non-specific tag - if (source[0] !== '!') { - onError(`Not a valid tag: ${source}`); - return null; - } - if (source[1] === '<') { - const verbatim = source.slice(2, -1); - if (verbatim === '!' 
|| verbatim === '!!') { - onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); - return null; - } - if (source[source.length - 1] !== '>') - onError('Verbatim tags must end with a >'); - return verbatim; - } - const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/); - if (!suffix) - onError(`The ${source} tag has no suffix`); - const prefix = this.tags[handle]; - if (prefix) - return prefix + decodeURIComponent(suffix); - if (handle === '!') - return source; // local tag - onError(`Could not resolve tag: ${source}`); - return null; - } - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. - */ - tagString(tag) { - for (const [handle, prefix] of Object.entries(this.tags)) { - if (tag.startsWith(prefix)) - return handle + escapeTagName(tag.substring(prefix.length)); - } - return tag[0] === '!' ? tag : `!<${tag}>`; - } - toString(doc) { - const lines = this.yaml.explicit - ? [`%YAML ${this.yaml.version || '1.2'}`] - : []; - const tagEntries = Object.entries(this.tags); - let tagNames; - if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) { - const tags = {}; - visit.visit(doc.contents, (_key, node) => { - if (Node.isNode(node) && node.tag) - tags[node.tag] = true; - }); - tagNames = Object.keys(tags); - } - else - tagNames = []; - for (const [handle, prefix] of tagEntries) { - if (handle === '!!' && prefix === 'tag:yaml.org,2002:') - continue; - if (!doc || tagNames.some(tn => tn.startsWith(prefix))) - lines.push(`%TAG ${handle} ${prefix}`); - } - return lines.join('\n'); - } -} -Directives.defaultYaml = { explicit: false, version: '1.2' }; -Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; - -exports.Directives = Directives; - - -/***/ }), - -/***/ 4236: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -class YAMLError extends Error { - constructor(name, pos, code, message) { - super(); - this.name = name; - this.code = code; - this.message = message; - this.pos = pos; - } -} -class YAMLParseError extends YAMLError { - constructor(pos, code, message) { - super('YAMLParseError', pos, code, message); - } -} -class YAMLWarning extends YAMLError { - constructor(pos, code, message) { - super('YAMLWarning', pos, code, message); - } -} -const prettifyError = (src, lc) => (error) => { - if (error.pos[0] === -1) - return; - error.linePos = error.pos.map(pos => lc.linePos(pos)); - const { line, col } = error.linePos[0]; - error.message += ` at line ${line}, column ${col}`; - let ci = col - 1; - let lineStr = src - .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) - .replace(/[\n\r]+$/, ''); - // Trim to max 80 chars, keeping col position near the middle - if (ci >= 60 && lineStr.length > 80) { - const trimStart = Math.min(ci - 39, lineStr.length - 79); - lineStr = '…' + lineStr.substring(trimStart); - ci -= trimStart - 1; - } - if (lineStr.length > 80) - lineStr = lineStr.substring(0, 79) + '…'; - // Include previous line in context if pointing at line start - if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { - // Regexp won't match if start is trimmed - let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); - if (prev.length > 80) - prev = prev.substring(0, 79) + '…\n'; - lineStr = prev + lineStr; - } - if (/[^ ]/.test(lineStr)) { - let count = 1; - const end = error.linePos[1]; - if (end && end.line === line && end.col > col) { - count = Math.min(end.col - col, 80 - ci); - } - const pointer = ' '.repeat(ci) + '^'.repeat(count); - 
error.message += `:\n\n${lineStr}\n${pointer}\n`; - } -}; - -exports.YAMLError = YAMLError; -exports.YAMLParseError = YAMLParseError; -exports.YAMLWarning = YAMLWarning; -exports.prettifyError = prettifyError; - - -/***/ }), - -/***/ 4083: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var composer = __nccwpck_require__(9493); -var Document = __nccwpck_require__(42); -var Schema = __nccwpck_require__(6831); -var errors = __nccwpck_require__(4236); -var Alias = __nccwpck_require__(5639); -var Node = __nccwpck_require__(1399); -var Pair = __nccwpck_require__(246); -var Scalar = __nccwpck_require__(9338); -var YAMLMap = __nccwpck_require__(6011); -var YAMLSeq = __nccwpck_require__(5161); -var cst = __nccwpck_require__(9169); -var lexer = __nccwpck_require__(5976); -var lineCounter = __nccwpck_require__(1929); -var parser = __nccwpck_require__(3328); -var publicApi = __nccwpck_require__(8649); -var visit = __nccwpck_require__(6796); - - - -exports.Composer = composer.Composer; -exports.Document = Document.Document; -exports.Schema = Schema.Schema; -exports.YAMLError = errors.YAMLError; -exports.YAMLParseError = errors.YAMLParseError; -exports.YAMLWarning = errors.YAMLWarning; -exports.Alias = Alias.Alias; -exports.isAlias = Node.isAlias; -exports.isCollection = Node.isCollection; -exports.isDocument = Node.isDocument; -exports.isMap = Node.isMap; -exports.isNode = Node.isNode; -exports.isPair = Node.isPair; -exports.isScalar = Node.isScalar; -exports.isSeq = Node.isSeq; -exports.Pair = Pair.Pair; -exports.Scalar = Scalar.Scalar; -exports.YAMLMap = YAMLMap.YAMLMap; -exports.YAMLSeq = YAMLSeq.YAMLSeq; -exports.CST = cst; -exports.Lexer = lexer.Lexer; -exports.LineCounter = lineCounter.LineCounter; -exports.Parser = parser.Parser; -exports.parse = publicApi.parse; -exports.parseAllDocuments = publicApi.parseAllDocuments; -exports.parseDocument = publicApi.parseDocument; -exports.stringify = publicApi.stringify; -exports.visit = visit.visit; -exports.visitAsync = visit.visitAsync; - - -/***/ }), - -/***/ 6909: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function debug(logLevel, ...messages) { - if (logLevel === 'debug') - console.log(...messages); -} -function warn(logLevel, warning) { - if (logLevel === 'debug' || logLevel === 'warn') { - if (typeof process !== 'undefined' && process.emitWarning) - process.emitWarning(warning); - else - console.warn(warning); - } -} - -exports.debug = debug; -exports.warn = warn; - - -/***/ }), - -/***/ 5639: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var anchors = __nccwpck_require__(8459); -var visit = __nccwpck_require__(6796); -var Node = __nccwpck_require__(1399); - -class Alias extends Node.NodeBase { - constructor(source) { - super(Node.ALIAS); - this.source = source; - Object.defineProperty(this, 'tag', { - set() { - throw new Error('Alias nodes cannot have tags'); - } - }); - } - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. 
- */ - resolve(doc) { - let found = undefined; - visit.visit(doc, { - Node: (_key, node) => { - if (node === this) - return visit.visit.BREAK; - if (node.anchor === this.source) - found = node; - } - }); - return found; - } - toJSON(_arg, ctx) { - if (!ctx) - return { source: this.source }; - const { anchors, doc, maxAliasCount } = ctx; - const source = this.resolve(doc); - if (!source) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new ReferenceError(msg); - } - const data = anchors.get(source); - /* istanbul ignore if */ - if (!data || data.res === undefined) { - const msg = 'This should not happen: Alias anchor was not resolved?'; - throw new ReferenceError(msg); - } - if (maxAliasCount >= 0) { - data.count += 1; - if (data.aliasCount === 0) - data.aliasCount = getAliasCount(doc, source, anchors); - if (data.count * data.aliasCount > maxAliasCount) { - const msg = 'Excessive alias count indicates a resource exhaustion attack'; - throw new ReferenceError(msg); - } - } - return data.res; - } - toString(ctx, _onComment, _onChompKeep) { - const src = `*${this.source}`; - if (ctx) { - anchors.anchorIsValid(this.source); - if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new Error(msg); - } - if (ctx.implicitKey) - return `${src} `; - } - return src; - } -} -function getAliasCount(doc, node, anchors) { - if (Node.isAlias(node)) { - const source = node.resolve(doc); - const anchor = anchors && source && anchors.get(source); - return anchor ? anchor.count * anchor.aliasCount : 0; - } - else if (Node.isCollection(node)) { - let count = 0; - for (const item of node.items) { - const c = getAliasCount(doc, item, anchors); - if (c > count) - count = c; - } - return count; - } - else if (Node.isPair(node)) { - const kc = getAliasCount(doc, node.key, anchors); - const vc = getAliasCount(doc, node.value, anchors); - return Math.max(kc, vc); - } - return 1; -} - -exports.Alias = Alias; - - -/***/ }), - -/***/ 3466: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9652); -var Node = __nccwpck_require__(1399); - -function collectionFromPath(schema, path, value) { - let v = value; - for (let i = path.length - 1; i >= 0; --i) { - const k = path[i]; - if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { - const a = []; - a[k] = v; - v = a; - } - else { - v = new Map([[k, v]]); - } - } - return createNode.createNode(v, undefined, { - aliasDuplicateObjects: false, - keepUndefined: false, - onAnchor: () => { - throw new Error('This should not happen, please report a bug.'); - }, - schema, - sourceObjects: new Map() - }); -} -// Type guard is intentionally a little wrong so as to be more useful, -// as it does not cover untypable empty non-string iterables (e.g. []). -const isEmptyPath = (path) => path == null || - (typeof path === 'object' && !!path[Symbol.iterator]().next().done); -class Collection extends Node.NodeBase { - constructor(type, schema) { - super(type); - Object.defineProperty(this, 'schema', { - value: schema, - configurable: true, - enumerable: false, - writable: true - }); - } - /** - * Create a copy of this collection. 
- * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema) { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (schema) - copy.schema = schema; - copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path, value) { - if (isEmptyPath(path)) - this.add(value); - else { - const [key, ...rest] = path; - const node = this.get(key, true); - if (Node.isCollection(node)) - node.addIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.delete(key); - const node = this.get(key, true); - if (Node.isCollection(node)) - return node.deleteIn(rest); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - const [key, ...rest] = path; - const node = this.get(key, true); - if (rest.length === 0) - return !keepScalar && Node.isScalar(node) ? node.value : node; - else - return Node.isCollection(node) ? node.getIn(rest, keepScalar) : undefined; - } - hasAllNullValues(allowScalar) { - return this.items.every(node => { - if (!Node.isPair(node)) - return false; - const n = node.value; - return (n == null || - (allowScalar && - Node.isScalar(n) && - n.value == null && - !n.commentBefore && - !n.comment && - !n.tag)); - }); - } - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.has(key); - const node = this.get(key, true); - return Node.isCollection(node) ? node.hasIn(rest) : false; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - const [key, ...rest] = path; - if (rest.length === 0) { - this.set(key, value); - } - else { - const node = this.get(key, true); - if (Node.isCollection(node)) - node.setIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); - } - } -} -Collection.maxFlowStringSingleLineLength = 60; - -exports.Collection = Collection; -exports.collectionFromPath = collectionFromPath; -exports.isEmptyPath = isEmptyPath; - - -/***/ }), - -/***/ 1399: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const ALIAS = Symbol.for('yaml.alias'); -const DOC = Symbol.for('yaml.document'); -const MAP = Symbol.for('yaml.map'); -const PAIR = Symbol.for('yaml.pair'); -const SCALAR = Symbol.for('yaml.scalar'); -const SEQ = Symbol.for('yaml.seq'); -const NODE_TYPE = Symbol.for('yaml.node.type'); -const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; -const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; -const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; -const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; -const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; -const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; -function isCollection(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case MAP: - case SEQ: - return true; - } - return false; -} -function isNode(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case ALIAS: - case MAP: - case SCALAR: - case SEQ: - return true; - } - return false; -} -const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; -class NodeBase { - constructor(type) { - Object.defineProperty(this, NODE_TYPE, { value: type }); - } - /** Create a copy of this node. */ - clone() { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (this.range) - copy.range = this.range.slice(); - return copy; - } -} - -exports.ALIAS = ALIAS; -exports.DOC = DOC; -exports.MAP = MAP; -exports.NODE_TYPE = NODE_TYPE; -exports.NodeBase = NodeBase; -exports.PAIR = PAIR; -exports.SCALAR = SCALAR; -exports.SEQ = SEQ; -exports.hasAnchor = hasAnchor; -exports.isAlias = isAlias; -exports.isCollection = isCollection; -exports.isDocument = isDocument; -exports.isMap = isMap; -exports.isNode = isNode; -exports.isPair = isPair; -exports.isScalar = isScalar; -exports.isSeq = isSeq; - - -/***/ }), - -/***/ 246: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9652); -var stringifyPair = __nccwpck_require__(4875); -var addPairToJSMap = __nccwpck_require__(4676); -var Node = __nccwpck_require__(1399); - -function createPair(key, value, ctx) { - const k = createNode.createNode(key, undefined, ctx); - const v = createNode.createNode(value, undefined, ctx); - return new Pair(k, v); -} -class Pair { - constructor(key, value = null) { - Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR }); - this.key = key; - this.value = value; - } - clone(schema) { - let { key, value } = this; - if (Node.isNode(key)) - key = key.clone(schema); - if (Node.isNode(value)) - value = value.clone(schema); - return new Pair(key, value); - } - toJSON(_, ctx) { - const pair = ctx?.mapAsMap ? new Map() : {}; - return addPairToJSMap.addPairToJSMap(ctx, pair, this); - } - toString(ctx, onComment, onChompKeep) { - return ctx?.doc - ? 
stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep) - : JSON.stringify(this); - } -} - -exports.Pair = Pair; -exports.createPair = createPair; - - -/***/ }), - -/***/ 9338: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var toJS = __nccwpck_require__(2463); - -const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); -class Scalar extends Node.NodeBase { - constructor(value) { - super(Node.SCALAR); - this.value = value; - } - toJSON(arg, ctx) { - return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx); - } - toString() { - return String(this.value); - } -} -Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; -Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; -Scalar.PLAIN = 'PLAIN'; -Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; -Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; - -exports.Scalar = Scalar; -exports.isScalarValue = isScalarValue; - - -/***/ }), - -/***/ 6011: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyCollection = __nccwpck_require__(2466); -var addPairToJSMap = __nccwpck_require__(4676); -var Collection = __nccwpck_require__(3466); -var Node = __nccwpck_require__(1399); -var Pair = __nccwpck_require__(246); -var Scalar = __nccwpck_require__(9338); - -function findPair(items, key) { - const k = Node.isScalar(key) ? key.value : key; - for (const it of items) { - if (Node.isPair(it)) { - if (it.key === key || it.key === k) - return it; - if (Node.isScalar(it.key) && it.key.value === k) - return it; - } - } - return undefined; -} -class YAMLMap extends Collection.Collection { - constructor(schema) { - super(Node.MAP, schema); - this.items = []; - } - static get tagName() { - return 'tag:yaml.org,2002:map'; - } - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. - */ - add(pair, overwrite) { - let _pair; - if (Node.isPair(pair)) - _pair = pair; - else if (!pair || typeof pair !== 'object' || !('key' in pair)) { - // In TypeScript, this never happens. - _pair = new Pair.Pair(pair, pair?.value); - } - else - _pair = new Pair.Pair(pair.key, pair.value); - const prev = findPair(this.items, _pair.key); - const sortEntries = this.schema?.sortMapEntries; - if (prev) { - if (!overwrite) - throw new Error(`Key ${_pair.key} already set`); - // For scalars, keep the old node & its comments and anchors - if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value)) - prev.value.value = _pair.value; - else - prev.value = _pair.value; - } - else if (sortEntries) { - const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); - if (i === -1) - this.items.push(_pair); - else - this.items.splice(i, 0, _pair); - } - else { - this.items.push(_pair); - } - } - delete(key) { - const it = findPair(this.items, key); - if (!it) - return false; - const del = this.items.splice(this.items.indexOf(it), 1); - return del.length > 0; - } - get(key, keepScalar) { - const it = findPair(this.items, key); - const node = it?.value; - return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? 
undefined; - } - has(key) { - return !!findPair(this.items, key); - } - set(key, value) { - this.add(new Pair.Pair(key, value), true); - } - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON(_, ctx, Type) { - const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; - if (ctx?.onCreate) - ctx.onCreate(map); - for (const item of this.items) - addPairToJSMap.addPairToJSMap(ctx, map, item); - return map; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - for (const item of this.items) { - if (!Node.isPair(item)) - throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); - } - if (!ctx.allNullValues && this.hasAllNullValues(false)) - ctx = Object.assign({}, ctx, { allNullValues: true }); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '', - flowChars: { start: '{', end: '}' }, - itemIndent: ctx.indent || '', - onChompKeep, - onComment - }); - } -} - -exports.YAMLMap = YAMLMap; -exports.findPair = findPair; - - -/***/ }), - -/***/ 5161: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyCollection = __nccwpck_require__(2466); -var Collection = __nccwpck_require__(3466); -var Node = __nccwpck_require__(1399); -var Scalar = __nccwpck_require__(9338); -var toJS = __nccwpck_require__(2463); - -class YAMLSeq extends Collection.Collection { - constructor(schema) { - super(Node.SEQ, schema); - this.items = []; - } - static get tagName() { - return 'tag:yaml.org,2002:seq'; - } - add(value) { - this.items.push(value); - } - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. - */ - delete(key) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return false; - const del = this.items.splice(idx, 1); - return del.length > 0; - } - get(key, keepScalar) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return undefined; - const it = this.items[idx]; - return !keepScalar && Node.isScalar(it) ? it.value : it; - } - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key) { - const idx = asItemIndex(key); - return typeof idx === 'number' && idx < this.items.length; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. 
- */ - set(key, value) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - throw new Error(`Expected a valid index, not ${key}.`); - const prev = this.items[idx]; - if (Node.isScalar(prev) && Scalar.isScalarValue(value)) - prev.value = value; - else - this.items[idx] = value; - } - toJSON(_, ctx) { - const seq = []; - if (ctx?.onCreate) - ctx.onCreate(seq); - let i = 0; - for (const item of this.items) - seq.push(toJS.toJS(item, String(i++), ctx)); - return seq; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '- ', - flowChars: { start: '[', end: ']' }, - itemIndent: (ctx.indent || '') + ' ', - onChompKeep, - onComment - }); - } -} -function asItemIndex(key) { - let idx = Node.isScalar(key) ? key.value : key; - if (idx && typeof idx === 'string') - idx = Number(idx); - return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 - ? idx - : null; -} - -exports.YAMLSeq = YAMLSeq; - - -/***/ }), - -/***/ 4676: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var log = __nccwpck_require__(6909); -var stringify = __nccwpck_require__(8409); -var Node = __nccwpck_require__(1399); -var Scalar = __nccwpck_require__(9338); -var toJS = __nccwpck_require__(2463); - -const MERGE_KEY = '<<'; -function addPairToJSMap(ctx, map, { key, value }) { - if (ctx?.doc.schema.merge && isMergeKey(key)) { - value = Node.isAlias(value) ? value.resolve(ctx.doc) : value; - if (Node.isSeq(value)) - for (const it of value.items) - mergeToJSMap(ctx, map, it); - else if (Array.isArray(value)) - for (const it of value) - mergeToJSMap(ctx, map, it); - else - mergeToJSMap(ctx, map, value); - } - else { - const jsKey = toJS.toJS(key, '', ctx); - if (map instanceof Map) { - map.set(jsKey, toJS.toJS(value, jsKey, ctx)); - } - else if (map instanceof Set) { - map.add(jsKey); - } - else { - const stringKey = stringifyKey(key, jsKey, ctx); - const jsValue = toJS.toJS(value, stringKey, ctx); - if (stringKey in map) - Object.defineProperty(map, stringKey, { - value: jsValue, - writable: true, - enumerable: true, - configurable: true - }); - else - map[stringKey] = jsValue; - } - } - return map; -} -const isMergeKey = (key) => key === MERGE_KEY || - (Node.isScalar(key) && - key.value === MERGE_KEY && - (!key.type || key.type === Scalar.Scalar.PLAIN)); -// If the value associated with a merge key is a single mapping node, each of -// its key/value pairs is inserted into the current mapping, unless the key -// already exists in it. If the value associated with the merge key is a -// sequence, then this sequence is expected to contain mapping nodes and each -// of these nodes is merged in turn according to its order in the sequence. -// Keys in mapping nodes earlier in the sequence override keys specified in -// later mapping nodes. -- http://yaml.org/type/merge.html -function mergeToJSMap(ctx, map, value) { - const source = ctx && Node.isAlias(value) ? 
value.resolve(ctx.doc) : value; - if (!Node.isMap(source)) - throw new Error('Merge sources must be maps or map aliases'); - const srcMap = source.toJSON(null, ctx, Map); - for (const [key, value] of srcMap) { - if (map instanceof Map) { - if (!map.has(key)) - map.set(key, value); - } - else if (map instanceof Set) { - map.add(key); - } - else if (!Object.prototype.hasOwnProperty.call(map, key)) { - Object.defineProperty(map, key, { - value, - writable: true, - enumerable: true, - configurable: true - }); - } - } - return map; -} -function stringifyKey(key, jsKey, ctx) { - if (jsKey === null) - return ''; - if (typeof jsKey !== 'object') - return String(jsKey); - if (Node.isNode(key) && ctx && ctx.doc) { - const strCtx = stringify.createStringifyContext(ctx.doc, {}); - strCtx.anchors = new Set(); - for (const node of ctx.anchors.keys()) - strCtx.anchors.add(node.anchor); - strCtx.inFlow = true; - strCtx.inStringifyKey = true; - const strKey = key.toString(strCtx); - if (!ctx.mapKeyWarned) { - let jsonStr = JSON.stringify(strKey); - if (jsonStr.length > 40) - jsonStr = jsonStr.substring(0, 36) + '..."'; - log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`); - ctx.mapKeyWarned = true; - } - return strKey; - } - return JSON.stringify(jsKey); -} - -exports.addPairToJSMap = addPairToJSMap; - - -/***/ }), - -/***/ 2463: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); - -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * @param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. - */ -function toJS(value, arg, ctx) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - if (Array.isArray(value)) - return value.map((v, i) => toJS(v, String(i), ctx)); - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - if (!ctx || !Node.hasAnchor(value)) - return value.toJSON(arg, ctx); - const data = { aliasCount: 0, count: 1, res: undefined }; - ctx.anchors.set(value, data); - ctx.onCreate = res => { - data.res = res; - delete ctx.onCreate; - }; - const res = value.toJSON(arg, ctx); - if (ctx.onCreate) - ctx.onCreate(res); - return res; - } - if (typeof value === 'bigint' && !ctx?.keep) - return Number(value); - return value; -} - -exports.toJS = toJS; - - -/***/ }), - -/***/ 9027: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var resolveBlockScalar = __nccwpck_require__(9485); -var resolveFlowScalar = __nccwpck_require__(7578); -var errors = __nccwpck_require__(4236); -var stringifyString = __nccwpck_require__(6226); - -function resolveAsScalar(token, strict = true, onError) { - if (token) { - const _onError = (pos, code, message) => { - const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
pos[0] : pos.offset; - if (onError) - onError(offset, code, message); - else - throw new errors.YAMLParseError([offset, offset + 1], code, message); - }; - switch (token.type) { - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return resolveFlowScalar.resolveFlowScalar(token, strict, _onError); - case 'block-scalar': - return resolveBlockScalar.resolveBlockScalar(token, strict, _onError); - } - } - return null; -} -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function createScalarToken(value, context) { - const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; - const source = stringifyString.stringifyString({ type, value }, { - implicitKey, - indent: indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - const end = context.end ?? [ - { type: 'newline', offset: -1, indent, source: '\n' } - ]; - switch (source[0]) { - case '|': - case '>': { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, end)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - return { type: 'block-scalar', offset, indent, props, source: body }; - } - case '"': - return { type: 'double-quoted-scalar', offset, indent, source, end }; - case "'": - return { type: 'single-quoted-scalar', offset, indent, source, end }; - default: - return { type: 'scalar', offset, indent, source, end }; - } -} -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. - * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. 
- * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function setScalarValue(token, value, context = {}) { - let { afterKey = false, implicitKey = false, inFlow = false, type } = context; - let indent = 'indent' in token ? token.indent : null; - if (afterKey && typeof indent === 'number') - indent += 2; - if (!type) - switch (token.type) { - case 'single-quoted-scalar': - type = 'QUOTE_SINGLE'; - break; - case 'double-quoted-scalar': - type = 'QUOTE_DOUBLE'; - break; - case 'block-scalar': { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; - break; - } - default: - type = 'PLAIN'; - } - const source = stringifyString.stringifyString({ type, value }, { - implicitKey: implicitKey || indent === null, - indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - switch (source[0]) { - case '|': - case '>': - setBlockScalarValue(token, source); - break; - case '"': - setFlowScalarValue(token, source, 'double-quoted-scalar'); - break; - case "'": - setFlowScalarValue(token, source, 'single-quoted-scalar'); - break; - default: - setFlowScalarValue(token, source, 'scalar'); - } -} -function setBlockScalarValue(token, source) { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - if (token.type === 'block-scalar') { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - header.source = head; - token.source = body; - } - else { - const { offset } = token; - const indent = 'indent' in token ? token.indent : -1; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type: 'block-scalar', indent, props, source: body }); - } -} -/** @returns `true` if last token is a newline */ -function addEndtoBlockProps(props, end) { - if (end) - for (const st of end) - switch (st.type) { - case 'space': - case 'comment': - props.push(st); - break; - case 'newline': - props.push(st); - return true; - } - return false; -} -function setFlowScalarValue(token, source, type) { - switch (token.type) { - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - token.type = type; - token.source = source; - break; - case 'block-scalar': { - const end = token.props.slice(1); - let oa = source.length; - if (token.props[0].type === 'block-scalar-header') - oa -= token.props[0].source.length; - for (const tok of end) - tok.offset += oa; - delete token.props; - Object.assign(token, { type, source, end }); - break; - } - case 'block-map': - case 'block-seq': { - const offset = token.offset + source.length; - const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; - delete token.items; - Object.assign(token, { type, source, end: [nl] }); - break; - } - default: { - const indent = 'indent' in token ? token.indent : -1; - const end = 'end' in token && Array.isArray(token.end) - ? token.end.filter(st => st.type === 'space' || - st.type === 'comment' || - st.type === 'newline') - : []; - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type, indent, source, end }); - } - } -} - -exports.createScalarToken = createScalarToken; -exports.resolveAsScalar = resolveAsScalar; -exports.setScalarValue = setScalarValue; - - -/***/ }), - -/***/ 6307: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. - */ -const stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst); -function stringifyToken(token) { - switch (token.type) { - case 'block-scalar': { - let res = ''; - for (const tok of token.props) - res += stringifyToken(tok); - return res + token.source; - } - case 'block-map': - case 'block-seq': { - let res = ''; - for (const item of token.items) - res += stringifyItem(item); - return res; - } - case 'flow-collection': { - let res = token.start.source; - for (const item of token.items) - res += stringifyItem(item); - for (const st of token.end) - res += st.source; - return res; - } - case 'document': { - let res = stringifyItem(token); - if (token.end) - for (const st of token.end) - res += st.source; - return res; - } - default: { - let res = token.source; - if ('end' in token && token.end) - for (const st of token.end) - res += st.source; - return res; - } - } -} -function stringifyItem({ start, key, sep, value }) { - let res = ''; - for (const st of start) - res += st.source; - if (key) - res += stringifyToken(key); - if (sep) - for (const st of sep) - res += st.source; - if (value) - res += stringifyToken(value); - return res; -} - -exports.stringify = stringify; - - -/***/ }), - -/***/ 8497: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove item'); -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. - * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -function visit(cst, visitor) { - if ('type' in cst && cst.type === 'document') - cst = { start: cst.start, value: cst.value }; - _visit(Object.freeze([]), cst, visitor); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. 
-/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current item */ -visit.SKIP = SKIP; -/** Remove the current item */ -visit.REMOVE = REMOVE; -/** Find the item at `path` from `cst` as the root */ -visit.itemAtPath = (cst, path) => { - let item = cst; - for (const [field, index] of path) { - const tok = item?.[field]; - if (tok && 'items' in tok) { - item = tok.items[index]; - } - else - return undefined; - } - return item; -}; -/** - * Get the immediate parent collection of the item at `path` from `cst` as the root. - * - * Throws an error if the collection is not found, which should never happen if the item itself exists. - */ -visit.parentCollection = (cst, path) => { - const parent = visit.itemAtPath(cst, path.slice(0, -1)); - const field = path[path.length - 1][0]; - const coll = parent?.[field]; - if (coll && 'items' in coll) - return coll; - throw new Error('Parent collection not found'); -}; -function _visit(path, item, visitor) { - let ctrl = visitor(item, path); - if (typeof ctrl === 'symbol') - return ctrl; - for (const field of ['key', 'value']) { - const token = item[field]; - if (token && 'items' in token) { - for (let i = 0; i < token.items.length; ++i) { - const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - token.items.splice(i, 1); - i -= 1; - } - } - if (typeof ctrl === 'function' && field === 'key') - ctrl = ctrl(item, path); - } - } - return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; -} - -exports.visit = visit; - - -/***/ }), - -/***/ 9169: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cstScalar = __nccwpck_require__(9027); -var cstStringify = __nccwpck_require__(6307); -var cstVisit = __nccwpck_require__(8497); - -/** The byte order mark */ -const BOM = '\u{FEFF}'; -/** Start of doc-mode */ -const DOCUMENT = '\x02'; // C0: Start of Text -/** Unexpected end of flow-mode */ -const FLOW_END = '\x18'; // C0: Cancel -/** Next token is a scalar value */ -const SCALAR = '\x1f'; // C0: Unit Separator -/** @returns `true` if `token` is a flow or block collection */ -const isCollection = (token) => !!token && 'items' in token; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -const isScalar = (token) => !!token && - (token.type === 'scalar' || - token.type === 'single-quoted-scalar' || - token.type === 'double-quoted-scalar' || - token.type === 'block-scalar'); -/* istanbul ignore next */ -/** Get a printable representation of a lexer token */ -function prettyToken(token) { - switch (token) { - case BOM: - return ''; - case DOCUMENT: - return ''; - case FLOW_END: - return ''; - case SCALAR: - return ''; - default: - return JSON.stringify(token); - } -} -/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ -function tokenType(source) { - switch (source) { - case BOM: - return 'byte-order-mark'; - case DOCUMENT: - return 'doc-mode'; - case FLOW_END: - return 'flow-error-end'; - case SCALAR: - return 'scalar'; - case '---': - return 'doc-start'; - case '...': - return 'doc-end'; - case '': - case '\n': - case '\r\n': - return 'newline'; - case '-': - return 'seq-item-ind'; - case '?': - return 'explicit-key-ind'; - case ':': - return 'map-value-ind'; - case '{': - return 'flow-map-start'; - case '}': - return 'flow-map-end'; - case '[': - return 'flow-seq-start'; - case ']': - return 'flow-seq-end'; - case ',': - return 'comma'; - } - switch (source[0]) { - case ' ': - case '\t': - return 'space'; - case '#': - return 'comment'; - case '%': - return 'directive-line'; - case '*': - return 'alias'; - case '&': - return 'anchor'; - case '!': - return 'tag'; - case "'": - return 'single-quoted-scalar'; - case '"': - return 'double-quoted-scalar'; - case '|': - case '>': - return 'block-scalar-header'; - } - return null; -} - -exports.createScalarToken = cstScalar.createScalarToken; -exports.resolveAsScalar = cstScalar.resolveAsScalar; -exports.setScalarValue = cstScalar.setScalarValue; -exports.stringify = cstStringify.stringify; -exports.visit = cstVisit.visit; -exports.BOM = BOM; -exports.DOCUMENT = DOCUMENT; -exports.FLOW_END = FLOW_END; -exports.SCALAR = SCALAR; -exports.isCollection = isCollection; -exports.isScalar = isScalar; -exports.prettyToken = prettyToken; -exports.tokenType = tokenType; - - -/***/ }), - -/***/ 5976: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cst = __nccwpck_require__(9169); - -/* -START -> stream - -stream - directive -> line-end -> stream - indent + line-end -> stream - [else] -> line-start - -line-end - comment -> line-end - newline -> . - input-end -> END - -line-start - doc-start -> doc - doc-end -> stream - [else] -> indent -> block-start - -block-start - seq-item-start -> block-start - explicit-key-start -> block-start - map-value-start -> block-start - [else] -> doc - -doc - line-end -> line-start - spaces -> doc - anchor -> doc - tag -> doc - flow-start -> flow -> doc - flow-end -> error -> doc - seq-item-start -> error -> doc - explicit-key-start -> error -> doc - map-value-start -> doc - alias -> doc - quote-start -> quoted-scalar -> doc - block-scalar-header -> line-end -> block-scalar(min) -> line-start - [else] -> plain-scalar(false, min) -> doc - -flow - line-end -> flow - spaces -> flow - anchor -> flow - tag -> flow - flow-start -> flow -> flow - flow-end -> . - seq-item-start -> error -> flow - explicit-key-start -> flow - map-value-start -> flow - alias -> flow - quote-start -> quoted-scalar -> flow - comma -> flow - [else] -> plain-scalar(true, 0) -> flow - -quoted-scalar - quote-end -> . - [else] -> quoted-scalar - -block-scalar(min) - newline + peek(indent < min) -> . - [else] -> block-scalar(min) - -plain-scalar(is-flow, min) - scalar-end(is-flow) -> . - peek(newline + (indent < min)) -> . 
- [else] -> plain-scalar(min) -*/ -function isEmpty(ch) { - switch (ch) { - case undefined: - case ' ': - case '\n': - case '\r': - case '\t': - return true; - default: - return false; - } -} -const hexDigits = '0123456789ABCDEFabcdef'.split(''); -const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split(''); -const invalidFlowScalarChars = ',[]{}'.split(''); -const invalidAnchorChars = ' ,[]{}\n\r\t'.split(''); -const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch); -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. Incomplete input may be buffered - * until a complete token can be emitted. - * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -class Lexer { - constructor() { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - this.atEnd = false; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - this.blockScalarIndent = -1; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - this.blockScalarKeep = false; - /** Current input */ - this.buffer = ''; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - this.flowKey = false; - /** Count of surrounding flow collection levels. */ - this.flowLevel = 0; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - this.indentNext = 0; - /** Indentation level of the current line. */ - this.indentValue = 0; - /** Position of the next \n character. */ - this.lineEndPos = null; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - this.next = null; - /** A pointer to `buffer`; the current position of the lexer. */ - this.pos = 0; - } - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - *lex(source, incomplete = false) { - if (source) { - this.buffer = this.buffer ? this.buffer + source : source; - this.lineEndPos = null; - } - this.atEnd = !incomplete; - let next = this.next ?? 
'stream'; - while (next && (incomplete || this.hasChars(1))) - next = yield* this.parseNext(next); - } - atLineEnd() { - let i = this.pos; - let ch = this.buffer[i]; - while (ch === ' ' || ch === '\t') - ch = this.buffer[++i]; - if (!ch || ch === '#' || ch === '\n') - return true; - if (ch === '\r') - return this.buffer[i + 1] === '\n'; - return false; - } - charAt(n) { - return this.buffer[this.pos + n]; - } - continueScalar(offset) { - let ch = this.buffer[offset]; - if (this.indentNext > 0) { - let indent = 0; - while (ch === ' ') - ch = this.buffer[++indent + offset]; - if (ch === '\r') { - const next = this.buffer[indent + offset + 1]; - if (next === '\n' || (!next && !this.atEnd)) - return offset + indent + 1; - } - return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) - ? offset + indent - : -1; - } - if (ch === '-' || ch === '.') { - const dt = this.buffer.substr(offset, 3); - if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) - return -1; - } - return offset; - } - getLine() { - let end = this.lineEndPos; - if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { - end = this.buffer.indexOf('\n', this.pos); - this.lineEndPos = end; - } - if (end === -1) - return this.atEnd ? this.buffer.substring(this.pos) : null; - if (this.buffer[end - 1] === '\r') - end -= 1; - return this.buffer.substring(this.pos, end); - } - hasChars(n) { - return this.pos + n <= this.buffer.length; - } - setNext(state) { - this.buffer = this.buffer.substring(this.pos); - this.pos = 0; - this.lineEndPos = null; - this.next = state; - return null; - } - peek(n) { - return this.buffer.substr(this.pos, n); - } - *parseNext(next) { - switch (next) { - case 'stream': - return yield* this.parseStream(); - case 'line-start': - return yield* this.parseLineStart(); - case 'block-start': - return yield* this.parseBlockStart(); - case 'doc': - return yield* this.parseDocument(); - case 'flow': - return yield* this.parseFlowCollection(); - case 'quoted-scalar': - return yield* this.parseQuotedScalar(); - case 'block-scalar': - return yield* this.parseBlockScalar(); - case 'plain-scalar': - return yield* this.parsePlainScalar(); - } - } - *parseStream() { - let line = this.getLine(); - if (line === null) - return this.setNext('stream'); - if (line[0] === cst.BOM) { - yield* this.pushCount(1); - line = line.substring(1); - } - if (line[0] === '%') { - let dirEnd = line.length; - const cs = line.indexOf('#'); - if (cs !== -1) { - const ch = line[cs - 1]; - if (ch === ' ' || ch === '\t') - dirEnd = cs - 1; - } - while (true) { - const ch = line[dirEnd - 1]; - if (ch === ' ' || ch === '\t') - dirEnd -= 1; - else - break; - } - const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); - yield* this.pushCount(line.length - n); // possible comment - this.pushNewline(); - return 'stream'; - } - if (this.atLineEnd()) { - const sp = yield* this.pushSpaces(true); - yield* this.pushCount(line.length - sp); - yield* this.pushNewline(); - return 'stream'; - } - yield cst.DOCUMENT; - return yield* this.parseLineStart(); - } - *parseLineStart() { - const ch = this.charAt(0); - if (!ch && !this.atEnd) - return this.setNext('line-start'); - if (ch === '-' || ch === '.') { - if (!this.atEnd && !this.hasChars(4)) - return this.setNext('line-start'); - const s = this.peek(3); - if (s === '---' && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - this.indentValue = 0; - this.indentNext = 0; - return 'doc'; - } - else if (s === '...' 
&& isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - return 'stream'; - } - } - this.indentValue = yield* this.pushSpaces(false); - if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) - this.indentNext = this.indentValue; - return yield* this.parseBlockStart(); - } - *parseBlockStart() { - const [ch0, ch1] = this.peek(2); - if (!ch1 && !this.atEnd) - return this.setNext('block-start'); - if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) { - const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); - this.indentNext = this.indentValue + 1; - this.indentValue += n; - return yield* this.parseBlockStart(); - } - return 'doc'; - } - *parseDocument() { - yield* this.pushSpaces(true); - const line = this.getLine(); - if (line === null) - return this.setNext('doc'); - let n = yield* this.pushIndicators(); - switch (line[n]) { - case '#': - yield* this.pushCount(line.length - n); - // fallthrough - case undefined: - yield* this.pushNewline(); - return yield* this.parseLineStart(); - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel = 1; - return 'flow'; - case '}': - case ']': - // this is an error - yield* this.pushCount(1); - return 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'doc'; - case '"': - case "'": - return yield* this.parseQuotedScalar(); - case '|': - case '>': - n += yield* this.parseBlockScalarHeader(); - n += yield* this.pushSpaces(true); - yield* this.pushCount(line.length - n); - yield* this.pushNewline(); - return yield* this.parseBlockScalar(); - default: - return yield* this.parsePlainScalar(); - } - } - *parseFlowCollection() { - let nl, sp; - let indent = -1; - do { - nl = yield* this.pushNewline(); - if (nl > 0) { - sp = yield* this.pushSpaces(false); - this.indentValue = indent = sp; - } - else { - sp = 0; - } - sp += yield* this.pushSpaces(true); - } while (nl + sp > 0); - const line = this.getLine(); - if (line === null) - return this.setNext('flow'); - if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || - (indent === 0 && - (line.startsWith('---') || line.startsWith('...')) && - isEmpty(line[3]))) { - // Allowing for the terminal ] or } at the same (rather than greater) - // indent level as the initial [ or { is technically invalid, but - // failing here would be surprising to users. - const atFlowEndMarker = indent === this.indentNext - 1 && - this.flowLevel === 1 && - (line[0] === ']' || line[0] === '}'); - if (!atFlowEndMarker) { - // this is an error - this.flowLevel = 0; - yield cst.FLOW_END; - return yield* this.parseLineStart(); - } - } - let n = 0; - while (line[n] === ',') { - n += yield* this.pushCount(1); - n += yield* this.pushSpaces(true); - this.flowKey = false; - } - n += yield* this.pushIndicators(); - switch (line[n]) { - case undefined: - return 'flow'; - case '#': - yield* this.pushCount(line.length - n); - return 'flow'; - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel += 1; - return 'flow'; - case '}': - case ']': - yield* this.pushCount(1); - this.flowKey = true; - this.flowLevel -= 1; - return this.flowLevel ? 
'flow' : 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'flow'; - case '"': - case "'": - this.flowKey = true; - return yield* this.parseQuotedScalar(); - case ':': { - const next = this.charAt(1); - if (this.flowKey || isEmpty(next) || next === ',') { - this.flowKey = false; - yield* this.pushCount(1); - yield* this.pushSpaces(true); - return 'flow'; - } - } - // fallthrough - default: - this.flowKey = false; - return yield* this.parsePlainScalar(); - } - } - *parseQuotedScalar() { - const quote = this.charAt(0); - let end = this.buffer.indexOf(quote, this.pos + 1); - if (quote === "'") { - while (end !== -1 && this.buffer[end + 1] === "'") - end = this.buffer.indexOf("'", end + 2); - } - else { - // double-quote - while (end !== -1) { - let n = 0; - while (this.buffer[end - 1 - n] === '\\') - n += 1; - if (n % 2 === 0) - break; - end = this.buffer.indexOf('"', end + 1); - } - } - // Only looking for newlines within the quotes - const qb = this.buffer.substring(0, end); - let nl = qb.indexOf('\n', this.pos); - if (nl !== -1) { - while (nl !== -1) { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = qb.indexOf('\n', cs); - } - if (nl !== -1) { - // this is an error caused by an unexpected unindent - end = nl - (qb[nl - 1] === '\r' ? 2 : 1); - } - } - if (end === -1) { - if (!this.atEnd) - return this.setNext('quoted-scalar'); - end = this.buffer.length; - } - yield* this.pushToIndex(end + 1, false); - return this.flowLevel ? 'flow' : 'doc'; - } - *parseBlockScalarHeader() { - this.blockScalarIndent = -1; - this.blockScalarKeep = false; - let i = this.pos; - while (true) { - const ch = this.buffer[++i]; - if (ch === '+') - this.blockScalarKeep = true; - else if (ch > '0' && ch <= '9') - this.blockScalarIndent = Number(ch) - 1; - else if (ch !== '-') - break; - } - return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); - } - *parseBlockScalar() { - let nl = this.pos - 1; // may be -1 if this.pos === 0 - let indent = 0; - let ch; - loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { - switch (ch) { - case ' ': - indent += 1; - break; - case '\n': - nl = i; - indent = 0; - break; - case '\r': { - const next = this.buffer[i + 1]; - if (!next && !this.atEnd) - return this.setNext('block-scalar'); - if (next === '\n') - break; - } // fallthrough - default: - break loop; - } - } - if (!ch && !this.atEnd) - return this.setNext('block-scalar'); - if (indent >= this.indentNext) { - if (this.blockScalarIndent === -1) - this.indentNext = indent; - else - this.indentNext += this.blockScalarIndent; - do { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = this.buffer.indexOf('\n', cs); - } while (nl !== -1); - if (nl === -1) { - if (!this.atEnd) - return this.setNext('block-scalar'); - nl = this.buffer.length; - } - } - if (!this.blockScalarKeep) { - do { - let i = nl - 1; - let ch = this.buffer[i]; - if (ch === '\r') - ch = this.buffer[--i]; - const lastChar = i; // Drop the line if last char not more indented - while (ch === ' ' || ch === '\t') - ch = this.buffer[--i]; - if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) - nl = i; - else - break; - } while (true); - } - yield cst.SCALAR; - yield* this.pushToIndex(nl + 1, true); - return yield* this.parseLineStart(); - } - *parsePlainScalar() { - const inFlow = this.flowLevel > 0; - let end = this.pos - 1; - let i = this.pos - 1; - let ch; - while ((ch = this.buffer[++i])) { - if (ch === ':') { - const next = this.buffer[i + 1]; - if (isEmpty(next) || 
(inFlow && next === ',')) - break; - end = i; - } - else if (isEmpty(ch)) { - let next = this.buffer[i + 1]; - if (ch === '\r') { - if (next === '\n') { - i += 1; - ch = '\n'; - next = this.buffer[i + 1]; - } - else - end = i; - } - if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next))) - break; - if (ch === '\n') { - const cs = this.continueScalar(i + 1); - if (cs === -1) - break; - i = Math.max(i, cs - 2); // to advance, but still account for ' #' - } - } - else { - if (inFlow && invalidFlowScalarChars.includes(ch)) - break; - end = i; - } - } - if (!ch && !this.atEnd) - return this.setNext('plain-scalar'); - yield cst.SCALAR; - yield* this.pushToIndex(end + 1, true); - return inFlow ? 'flow' : 'doc'; - } - *pushCount(n) { - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos += n; - return n; - } - return 0; - } - *pushToIndex(i, allowEmpty) { - const s = this.buffer.slice(this.pos, i); - if (s) { - yield s; - this.pos += s.length; - return s.length; - } - else if (allowEmpty) - yield ''; - return 0; - } - *pushIndicators() { - switch (this.charAt(0)) { - case '!': - return ((yield* this.pushTag()) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '&': - return ((yield* this.pushUntil(isNotAnchorChar)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '-': // this is an error - case '?': // this is an error outside flow collections - case ':': { - const inFlow = this.flowLevel > 0; - const ch1 = this.charAt(1); - if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) { - if (!inFlow) - this.indentNext = this.indentValue + 1; - else if (this.flowKey) - this.flowKey = false; - return ((yield* this.pushCount(1)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - } - } - } - return 0; - } - *pushTag() { - if (this.charAt(1) === '<') { - let i = this.pos + 2; - let ch = this.buffer[i]; - while (!isEmpty(ch) && ch !== '>') - ch = this.buffer[++i]; - return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false); - } - else { - let i = this.pos + 1; - let ch = this.buffer[i]; - while (ch) { - if (tagChars.includes(ch)) - ch = this.buffer[++i]; - else if (ch === '%' && - hexDigits.includes(this.buffer[i + 1]) && - hexDigits.includes(this.buffer[i + 2])) { - ch = this.buffer[(i += 3)]; - } - else - break; - } - return yield* this.pushToIndex(i, false); - } - } - *pushNewline() { - const ch = this.buffer[this.pos]; - if (ch === '\n') - return yield* this.pushCount(1); - else if (ch === '\r' && this.charAt(1) === '\n') - return yield* this.pushCount(2); - else - return 0; - } - *pushSpaces(allowTabs) { - let i = this.pos - 1; - let ch; - do { - ch = this.buffer[++i]; - } while (ch === ' ' || (allowTabs && ch === '\t')); - const n = i - this.pos; - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos = i; - } - return n; - } - *pushUntil(test) { - let i = this.pos; - let ch = this.buffer[i]; - while (!test(ch)) - ch = this.buffer[++i]; - return yield* this.pushToIndex(i, false); - } -} - -exports.Lexer = Lexer; - - -/***/ }), - -/***/ 1929: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. - */ -class LineCounter { - constructor() { - this.lineStarts = []; - /** - * Should be called in ascending order. 
Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - this.addNewLine = (offset) => this.lineStarts.push(offset); - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. - */ - this.linePos = (offset) => { - let low = 0; - let high = this.lineStarts.length; - while (low < high) { - const mid = (low + high) >> 1; // Math.floor((low + high) / 2) - if (this.lineStarts[mid] < offset) - low = mid + 1; - else - high = mid; - } - if (this.lineStarts[low] === offset) - return { line: low + 1, col: 1 }; - if (low === 0) - return { line: 0, col: offset }; - const start = this.lineStarts[low - 1]; - return { line: low, col: offset - start + 1 }; - }; - } -} - -exports.LineCounter = LineCounter; - - -/***/ }), - -/***/ 3328: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var cst = __nccwpck_require__(9169); -var lexer = __nccwpck_require__(5976); - -function includesToken(list, type) { - for (let i = 0; i < list.length; ++i) - if (list[i].type === type) - return true; - return false; -} -function findNonEmptyIndex(list) { - for (let i = 0; i < list.length; ++i) { - switch (list[i].type) { - case 'space': - case 'comment': - case 'newline': - break; - default: - return i; - } - } - return -1; -} -function isFlowToken(token) { - switch (token?.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'flow-collection': - return true; - default: - return false; - } -} -function getPrevProps(parent) { - switch (parent.type) { - case 'document': - return parent.start; - case 'block-map': { - const it = parent.items[parent.items.length - 1]; - return it.sep ?? it.start; - } - case 'block-seq': - return parent.items[parent.items.length - 1].start; - /* istanbul ignore next should not happen */ - default: - return []; - } -} -/** Note: May modify input array */ -function getFirstKeyStartProps(prev) { - if (prev.length === 0) - return []; - let i = prev.length; - loop: while (--i >= 0) { - switch (prev[i].type) { - case 'doc-start': - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - case 'newline': - break loop; - } - } - while (prev[++i]?.type === 'space') { - /* loop */ - } - return prev.splice(i, prev.length); -} -function fixFlowSeqItems(fc) { - if (fc.start.type === 'flow-seq-start') { - for (const it of fc.items) { - if (it.sep && - !it.value && - !includesToken(it.start, 'explicit-key-ind') && - !includesToken(it.sep, 'map-value-ind')) { - if (it.key) - it.value = it.key; - delete it.key; - if (isFlowToken(it.value)) { - if (it.value.end) - Array.prototype.push.apply(it.value.end, it.sep); - else - it.value.end = it.sep; - } - else - Array.prototype.push.apply(it.start, it.sep); - delete it.sep; - } - } - } -} -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... - * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... 
- * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -class Parser { - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). - */ - constructor(onNewLine) { - /** If true, space and sequence indicators count as indentation */ - this.atNewLine = true; - /** If true, next token is a scalar value */ - this.atScalar = false; - /** Current indentation level */ - this.indent = 0; - /** Current offset since the start of parsing */ - this.offset = 0; - /** On the same line with a block map key */ - this.onKeyLine = false; - /** Top indicates the node that's currently being built */ - this.stack = []; - /** The source of the current token, set in parse() */ - this.source = ''; - /** The type of the current token, set in parse() */ - this.type = ''; - // Must be defined after `next()` - this.lexer = new lexer.Lexer(); - this.onNewLine = onNewLine; - } - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - *parse(source, incomplete = false) { - if (this.onNewLine && this.offset === 0) - this.onNewLine(0); - for (const lexeme of this.lexer.lex(source, incomplete)) - yield* this.next(lexeme); - if (!incomplete) - yield* this.end(); - } - /** - * Advance the parser by the `source` of one lexical token. - */ - *next(source) { - this.source = source; - if (process.env.LOG_TOKENS) - console.log('|', cst.prettyToken(source)); - if (this.atScalar) { - this.atScalar = false; - yield* this.step(); - this.offset += source.length; - return; - } - const type = cst.tokenType(source); - if (!type) { - const message = `Not a YAML token: ${source}`; - yield* this.pop({ type: 'error', offset: this.offset, message, source }); - this.offset += source.length; - } - else if (type === 'scalar') { - this.atNewLine = false; - this.atScalar = true; - this.type = 'scalar'; - } - else { - this.type = type; - yield* this.step(); - switch (type) { - case 'newline': - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) - this.onNewLine(this.offset + source.length); - break; - case 'space': - if (this.atNewLine && source[0] === ' ') - this.indent += source.length; - break; - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - if (this.atNewLine) - this.indent += source.length; - break; - case 'doc-mode': - case 'flow-error-end': - return; - default: - this.atNewLine = false; - } - this.offset += source.length; - } - } - /** Call at end of input to push out any remaining constructions */ - *end() { - while (this.stack.length > 0) - yield* this.pop(); - } - get sourceToken() { - const st = { - type: this.type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - return st; - } - *step() { - const top = this.peek(1); - if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { - while (this.stack.length > 0) - yield* this.pop(); - this.stack.push({ - type: 'doc-end', - offset: this.offset, - source: this.source - }); - return; - } - if (!top) - return yield* this.stream(); - switch (top.type) { - case 'document': - return yield* this.document(top); - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return yield* 
this.scalar(top); - case 'block-scalar': - return yield* this.blockScalar(top); - case 'block-map': - return yield* this.blockMap(top); - case 'block-seq': - return yield* this.blockSequence(top); - case 'flow-collection': - return yield* this.flowCollection(top); - case 'doc-end': - return yield* this.documentEnd(top); - } - /* istanbul ignore next should not happen */ - yield* this.pop(); - } - peek(n) { - return this.stack[this.stack.length - n]; - } - *pop(error) { - const token = error ?? this.stack.pop(); - /* istanbul ignore if should not happen */ - if (!token) { - const message = 'Tried to pop an empty stack'; - yield { type: 'error', offset: this.offset, source: '', message }; - } - else if (this.stack.length === 0) { - yield token; - } - else { - const top = this.peek(1); - if (token.type === 'block-scalar') { - // Block scalars use their parent rather than header indent - token.indent = 'indent' in top ? top.indent : 0; - } - else if (token.type === 'flow-collection' && top.type === 'document') { - // Ignore all indent for top-level flow collections - token.indent = 0; - } - if (token.type === 'flow-collection') - fixFlowSeqItems(token); - switch (top.type) { - case 'document': - top.value = token; - break; - case 'block-scalar': - top.props.push(token); // error - break; - case 'block-map': { - const it = top.items[top.items.length - 1]; - if (it.value) { - top.items.push({ start: [], key: token, sep: [] }); - this.onKeyLine = true; - return; - } - else if (it.sep) { - it.value = token; - } - else { - Object.assign(it, { key: token, sep: [] }); - this.onKeyLine = !includesToken(it.start, 'explicit-key-ind'); - return; - } - break; - } - case 'block-seq': { - const it = top.items[top.items.length - 1]; - if (it.value) - top.items.push({ start: [], value: token }); - else - it.value = token; - break; - } - case 'flow-collection': { - const it = top.items[top.items.length - 1]; - if (!it || it.value) - top.items.push({ start: [], key: token, sep: [] }); - else if (it.sep) - it.value = token; - else - Object.assign(it, { key: token, sep: [] }); - return; - } - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.pop(token); - } - if ((top.type === 'document' || - top.type === 'block-map' || - top.type === 'block-seq') && - (token.type === 'block-map' || token.type === 'block-seq')) { - const last = token.items[token.items.length - 1]; - if (last && - !last.sep && - !last.value && - last.start.length > 0 && - findNonEmptyIndex(last.start) === -1 && - (token.indent === 0 || - last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { - if (top.type === 'document') - top.end = last.start; - else - top.items.push({ start: last.start }); - token.items.splice(-1, 1); - } - } - } - } - *stream() { - switch (this.type) { - case 'directive-line': - yield { type: 'directive', offset: this.offset, source: this.source }; - return; - case 'byte-order-mark': - case 'space': - case 'comment': - case 'newline': - yield this.sourceToken; - return; - case 'doc-mode': - case 'doc-start': { - const doc = { - type: 'document', - offset: this.offset, - start: [] - }; - if (this.type === 'doc-start') - doc.start.push(this.sourceToken); - this.stack.push(doc); - return; - } - } - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML stream`, - source: this.source - }; - } - *document(doc) { - if (doc.value) - return yield* this.lineEnd(doc); - switch (this.type) { - case 'doc-start': { - if 
(findNonEmptyIndex(doc.start) !== -1) { - yield* this.pop(); - yield* this.step(); - } - else - doc.start.push(this.sourceToken); - return; - } - case 'anchor': - case 'tag': - case 'space': - case 'comment': - case 'newline': - doc.start.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(doc); - if (bv) - this.stack.push(bv); - else { - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML document`, - source: this.source - }; - } - } - *scalar(scalar) { - if (this.type === 'map-value-ind') { - const prev = getPrevProps(this.peek(2)); - const start = getFirstKeyStartProps(prev); - let sep; - if (scalar.end) { - sep = scalar.end; - sep.push(this.sourceToken); - delete scalar.end; - } - else - sep = [this.sourceToken]; - const map = { - type: 'block-map', - offset: scalar.offset, - indent: scalar.indent, - items: [{ start, key: scalar, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else - yield* this.lineEnd(scalar); - } - *blockScalar(scalar) { - switch (this.type) { - case 'space': - case 'comment': - case 'newline': - scalar.props.push(this.sourceToken); - return; - case 'scalar': - scalar.source = this.source; - // block-scalar source includes trailing newline - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - yield* this.pop(); - break; - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.step(); - } - } - *blockMap(map) { - const it = map.items[map.items.length - 1]; - // it.sep is true-ish if pair already has key or : separator - switch (this.type) { - case 'newline': - this.onKeyLine = false; - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'space': - case 'comment': - if (it.value) { - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - if (this.atIndentedComment(it.start, map.indent)) { - const prev = map.items[map.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - map.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - } - if (this.indent >= map.indent) { - const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep; - // For empty nodes, assign newline-separated not indented empty tokens to following node - let start = []; - if (atNextItem && it.sep && !it.value) { - const nl = []; - for (let i = 0; i < it.sep.length; ++i) { - const st = it.sep[i]; - switch (st.type) { - case 'newline': - nl.push(i); - break; - case 'space': - break; - case 'comment': - if (st.indent > map.indent) - nl.length = 0; - break; - default: - nl.length = 0; - } - } - if (nl.length >= 2) - start = it.sep.splice(nl[1]); - } - switch (this.type) { - case 'anchor': - case 'tag': - if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - this.onKeyLine = true; - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'explicit-key-ind': - if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) { - it.start.push(this.sourceToken); - } - else if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - } - else { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }); - } - this.onKeyLine = true; - return; - case 'map-value-ind': - if (includesToken(it.start, 'explicit-key-ind')) { - if (!it.sep) { - if (includesToken(it.start, 'newline')) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else { - const start = getFirstKeyStartProps(it.start); - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - } - else if (it.value) { - map.items.push({ start: [], key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - else if (isFlowToken(it.key) && - !includesToken(it.sep, 'newline')) { - const start = getFirstKeyStartProps(it.start); - const key = it.key; - const sep = it.sep; - sep.push(this.sourceToken); - // @ts-expect-error type guard is wrong here - delete it.key, delete it.sep; - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key, sep }] - }); - } - else if (start.length > 0) { - // Not actually at next item - it.sep = it.sep.concat(start, this.sourceToken); - } - else { - it.sep.push(this.sourceToken); - } - } - else { - if (!it.sep) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else if (it.value || atNextItem) { - map.items.push({ start, key: null, sep: 
[this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [], key: null, sep: [this.sourceToken] }] - }); - } - else { - it.sep.push(this.sourceToken); - } - } - this.onKeyLine = true; - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (atNextItem || it.value) { - map.items.push({ start, key: fs, sep: [] }); - this.onKeyLine = true; - } - else if (it.sep) { - this.stack.push(fs); - } - else { - Object.assign(it, { key: fs, sep: [] }); - this.onKeyLine = true; - } - return; - } - default: { - const bv = this.startBlockValue(map); - if (bv) { - if (atNextItem && - bv.type !== 'block-seq' && - includesToken(it.start, 'explicit-key-ind')) { - map.items.push({ start }); - } - this.stack.push(bv); - return; - } - } - } - } - yield* this.pop(); - yield* this.step(); - } - *blockSequence(seq) { - const it = seq.items[seq.items.length - 1]; - switch (this.type) { - case 'newline': - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - seq.items.push({ start: [this.sourceToken] }); - } - else - it.start.push(this.sourceToken); - return; - case 'space': - case 'comment': - if (it.value) - seq.items.push({ start: [this.sourceToken] }); - else { - if (this.atIndentedComment(it.start, seq.indent)) { - const prev = seq.items[seq.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - seq.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - case 'anchor': - case 'tag': - if (it.value || this.indent <= seq.indent) - break; - it.start.push(this.sourceToken); - return; - case 'seq-item-ind': - if (this.indent !== seq.indent) - break; - if (it.value || includesToken(it.start, 'seq-item-ind')) - seq.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - } - if (this.indent > seq.indent) { - const bv = this.startBlockValue(seq); - if (bv) { - this.stack.push(bv); - return; - } - } - yield* this.pop(); - yield* this.step(); - } - *flowCollection(fc) { - const it = fc.items[fc.items.length - 1]; - if (this.type === 'flow-error-end') { - let top; - do { - yield* this.pop(); - top = this.peek(1); - } while (top && top.type === 'flow-collection'); - } - else if (fc.end.length === 0) { - switch (this.type) { - case 'comma': - case 'explicit-key-ind': - if (!it || it.sep) - fc.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - case 'map-value-ind': - if (!it || it.value) - fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - Object.assign(it, { key: null, sep: [this.sourceToken] }); - return; - case 'space': - case 'comment': - case 'newline': - case 'anchor': - case 'tag': - if (!it || it.value) - fc.items.push({ start: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - it.start.push(this.sourceToken); - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (!it || it.value) - fc.items.push({ start: [], key: fs, sep: 
[] }); - else if (it.sep) - this.stack.push(fs); - else - Object.assign(it, { key: fs, sep: [] }); - return; - } - case 'flow-map-end': - case 'flow-seq-end': - fc.end.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(fc); - /* istanbul ignore else should not happen */ - if (bv) - this.stack.push(bv); - else { - yield* this.pop(); - yield* this.step(); - } - } - else { - const parent = this.peek(2); - if (parent.type === 'block-map' && - ((this.type === 'map-value-ind' && parent.indent === fc.indent) || - (this.type === 'newline' && - !parent.items[parent.items.length - 1].sep))) { - yield* this.pop(); - yield* this.step(); - } - else if (this.type === 'map-value-ind' && - parent.type !== 'flow-collection') { - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - fixFlowSeqItems(fc); - const sep = fc.end.splice(1, fc.end.length); - sep.push(this.sourceToken); - const map = { - type: 'block-map', - offset: fc.offset, - indent: fc.indent, - items: [{ start, key: fc, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else { - yield* this.lineEnd(fc); - } - } - } - flowScalar(type) { - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - return { - type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - } - startBlockValue(parent) { - switch (this.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return this.flowScalar(this.type); - case 'block-scalar-header': - return { - type: 'block-scalar', - offset: this.offset, - indent: this.indent, - props: [this.sourceToken], - source: '' - }; - case 'flow-map-start': - case 'flow-seq-start': - return { - type: 'flow-collection', - offset: this.offset, - indent: this.indent, - start: this.sourceToken, - items: [], - end: [] - }; - case 'seq-item-ind': - return { - type: 'block-seq', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }; - case 'explicit-key-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - start.push(this.sourceToken); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start }] - }; - } - case 'map-value-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }; - } - } - return null; - } - atIndentedComment(start, indent) { - if (this.type !== 'comment') - return false; - if (this.indent <= indent) - return false; - return start.every(st => st.type === 'newline' || st.type === 'space'); - } - *documentEnd(docEnd) { - if (this.type !== 'doc-mode') { - if (docEnd.end) - docEnd.end.push(this.sourceToken); - else - docEnd.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } - *lineEnd(token) { - switch (this.type) { - case 'comma': - case 'doc-start': - case 'doc-end': - case 'flow-seq-end': - case 'flow-map-end': - case 'map-value-ind': - yield* this.pop(); - yield* this.step(); - break; - case 'newline': - this.onKeyLine = false; - // fallthrough - case 'space': - case 'comment': - default: - // all other values are errors - if (token.end) - token.end.push(this.sourceToken); - else - 
token.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } -} - -exports.Parser = Parser; - - -/***/ }), - -/***/ 8649: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var composer = __nccwpck_require__(9493); -var Document = __nccwpck_require__(42); -var errors = __nccwpck_require__(4236); -var log = __nccwpck_require__(6909); -var lineCounter = __nccwpck_require__(1929); -var parser = __nccwpck_require__(3328); - -function parseOptions(options) { - const prettyErrors = options.prettyErrors !== false; - const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null; - return { lineCounter: lineCounter$1, prettyErrors }; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. In - * TypeScript, you should use `'empty' in docs` as a type guard for it. - */ -function parseAllDocuments(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - const docs = Array.from(composer$1.compose(parser$1.parse(source))); - if (prettyErrors && lineCounter) - for (const doc of docs) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - if (docs.length > 0) - return docs; - return Object.assign([], { empty: true }, composer$1.streamInfo()); -} -/** Parse an input string into a single YAML.Document */ -function parseDocument(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - // `doc` is always set by compose.end(true) at the very latest - let doc = null; - for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) { - if (!doc) - doc = _doc; - else if (doc.options.logLevel !== 'silent') { - doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); - break; - } - } - if (prettyErrors && lineCounter) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - return doc; -} -function parse(src, reviver, options) { - let _reviver = undefined; - if (typeof reviver === 'function') { - _reviver = reviver; - } - else if (options === undefined && reviver && typeof reviver === 'object') { - options = reviver; - } - const doc = parseDocument(src, options); - if (!doc) - return null; - doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning)); - if (doc.errors.length > 0) { - if (doc.options.logLevel !== 'silent') - throw doc.errors[0]; - else - doc.errors = []; - } - return doc.toJS(Object.assign({ reviver: _reviver }, options)); -} -function stringify(value, replacer, options) { - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - } - if (typeof options === 'string') - options = options.length; - if (typeof options === 'number') { - const indent = 
Math.round(options); - options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; - } - if (value === undefined) { - const { keepUndefined } = options ?? replacer ?? {}; - if (!keepUndefined) - return undefined; - } - return new Document.Document(value, _replacer, options).toString(options); -} - -exports.parse = parse; -exports.parseAllDocuments = parseAllDocuments; -exports.parseDocument = parseDocument; -exports.stringify = stringify; - - -/***/ }), - -/***/ 6831: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var map = __nccwpck_require__(83); -var seq = __nccwpck_require__(1693); -var string = __nccwpck_require__(2201); -var tags = __nccwpck_require__(4138); - -const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; -class Schema { - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { - this.compat = Array.isArray(compat) - ? tags.getTags(compat, 'compat') - : compat - ? tags.getTags(null, compat) - : null; - this.merge = !!merge; - this.name = (typeof schema === 'string' && schema) || 'core'; - this.knownTags = resolveKnownTags ? tags.coreKnownTags : {}; - this.tags = tags.getTags(customTags, this.name); - this.toStringOptions = toStringDefaults ?? null; - Object.defineProperty(this, Node.MAP, { value: map.map }); - Object.defineProperty(this, Node.SCALAR, { value: string.string }); - Object.defineProperty(this, Node.SEQ, { value: seq.seq }); - // Used by createMap() - this.sortMapEntries = - typeof sortMapEntries === 'function' - ? sortMapEntries - : sortMapEntries === true - ? sortMapEntriesByKey - : null; - } - clone() { - const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); - copy.tags = this.tags.slice(); - return copy; - } -} - -exports.Schema = Schema; - - -/***/ }), - -/***/ 83: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var Pair = __nccwpck_require__(246); -var YAMLMap = __nccwpck_require__(6011); - -function createMap(schema, obj, ctx) { - const { keepUndefined, replacer } = ctx; - const map = new YAMLMap.YAMLMap(schema); - const add = (key, value) => { - if (typeof replacer === 'function') - value = replacer.call(obj, key, value); - else if (Array.isArray(replacer) && !replacer.includes(key)) - return; - if (value !== undefined || keepUndefined) - map.items.push(Pair.createPair(key, value, ctx)); - }; - if (obj instanceof Map) { - for (const [key, value] of obj) - add(key, value); - } - else if (obj && typeof obj === 'object') { - for (const key of Object.keys(obj)) - add(key, obj[key]); - } - if (typeof schema.sortMapEntries === 'function') { - map.items.sort(schema.sortMapEntries); - } - return map; -} -const map = { - collection: 'map', - createNode: createMap, - default: true, - nodeClass: YAMLMap.YAMLMap, - tag: 'tag:yaml.org,2002:map', - resolve(map, onError) { - if (!Node.isMap(map)) - onError('Expected a mapping for this tag'); - return map; - } -}; - -exports.map = map; - - -/***/ }), - -/***/ 6703: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(9338); - -const nullTag = { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^(?:~|[Nn]ull|NULL)?$/, - resolve: () => new Scalar.Scalar(null), - stringify: ({ 
source }, ctx) => typeof source === 'string' && nullTag.test.test(source) - ? source - : ctx.options.nullStr -}; - -exports.nullTag = nullTag; - - -/***/ }), - -/***/ 1693: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var createNode = __nccwpck_require__(9652); -var Node = __nccwpck_require__(1399); -var YAMLSeq = __nccwpck_require__(5161); - -function createSeq(schema, obj, ctx) { - const { replacer } = ctx; - const seq = new YAMLSeq.YAMLSeq(schema); - if (obj && Symbol.iterator in Object(obj)) { - let i = 0; - for (let it of obj) { - if (typeof replacer === 'function') { - const key = obj instanceof Set ? it : String(i++); - it = replacer.call(obj, key, it); - } - seq.items.push(createNode.createNode(it, undefined, ctx)); - } - } - return seq; -} -const seq = { - collection: 'seq', - createNode: createSeq, - default: true, - nodeClass: YAMLSeq.YAMLSeq, - tag: 'tag:yaml.org,2002:seq', - resolve(seq, onError) { - if (!Node.isSeq(seq)) - onError('Expected a sequence for this tag'); - return seq; - } -}; - -exports.seq = seq; - - -/***/ }), - -/***/ 2201: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyString = __nccwpck_require__(6226); - -const string = { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify(item, ctx, onComment, onChompKeep) { - ctx = Object.assign({ actualString: true }, ctx); - return stringifyString.stringifyString(item, ctx, onComment, onChompKeep); - } -}; - -exports.string = string; - - -/***/ }), - -/***/ 2045: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(9338); - -const boolTag = { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, - resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'), - stringify({ source, value }, ctx) { - if (source && boolTag.test.test(source)) { - const sv = source[0] === 't' || source[0] === 'T'; - if (value === sv) - return source; - } - return value ? ctx.options.trueStr : ctx.options.falseStr; - } -}; - -exports.boolTag = boolTag; - - -/***/ }), - -/***/ 6810: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(9338); -var stringifyNumber = __nccwpck_require__(4174); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/, - resolve: str => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, - resolve: str => parseFloat(str), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str)); - const dot = str.indexOf('.'); - if (dot !== -1 && str[str.length - 1] === '0') - node.minFractionDigits = str.length - dot - 1; - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; - - -/***/ }), - -/***/ 3019: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(4174); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix)); -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value) && value >= 0) - return prefix + value.toString(radix); - return stringifyNumber.stringifyNumber(node); -} -const intOct = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^0o[0-7]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), - stringify: node => intStringify(node, 8, '0o') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9]+$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^0x[0-9a-fA-F]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intHex = intHex; -exports.intOct = intOct; - - -/***/ }), - -/***/ 27: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(83); -var _null = __nccwpck_require__(6703); -var seq = __nccwpck_require__(1693); -var string = __nccwpck_require__(2201); -var bool = __nccwpck_require__(2045); -var float = __nccwpck_require__(6810); -var int = __nccwpck_require__(3019); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.boolTag, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float -]; - -exports.schema = schema; - - -/***/ }), - -/***/ 4545: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(9338); -var map = __nccwpck_require__(83); -var seq = __nccwpck_require__(1693); - -function intIdentify(value) { - return typeof value === 'bigint' || Number.isInteger(value); -} -const stringifyJSON = ({ value }) => JSON.stringify(value); -const jsonScalars = [ - { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify: stringifyJSON - }, - { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^null$/, - resolve: () => null, - stringify: stringifyJSON - }, - { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^true|false$/, - 
resolve: str => str === 'true', - stringify: stringifyJSON - }, - { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^-?(?:0|[1-9][0-9]*)$/, - resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), - stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value) - }, - { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, - resolve: str => parseFloat(str), - stringify: stringifyJSON - } -]; -const jsonError = { - default: true, - tag: '', - test: /^/, - resolve(str, onError) { - onError(`Unresolved plain scalar ${JSON.stringify(str)}`); - return str; - } -}; -const schema = [map.map, seq.seq].concat(jsonScalars, jsonError); - -exports.schema = schema; - - -/***/ }), - -/***/ 4138: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(83); -var _null = __nccwpck_require__(6703); -var seq = __nccwpck_require__(1693); -var string = __nccwpck_require__(2201); -var bool = __nccwpck_require__(2045); -var float = __nccwpck_require__(6810); -var int = __nccwpck_require__(3019); -var schema = __nccwpck_require__(27); -var schema$1 = __nccwpck_require__(4545); -var binary = __nccwpck_require__(5724); -var omap = __nccwpck_require__(8974); -var pairs = __nccwpck_require__(9841); -var schema$2 = __nccwpck_require__(5389); -var set = __nccwpck_require__(7847); -var timestamp = __nccwpck_require__(1156); - -const schemas = new Map([ - ['core', schema.schema], - ['failsafe', [map.map, seq.seq, string.string]], - ['json', schema$1.schema], - ['yaml11', schema$2.schema], - ['yaml-1.1', schema$2.schema] -]); -const tagsByName = { - binary: binary.binary, - bool: bool.boolTag, - float: float.float, - floatExp: float.floatExp, - floatNaN: float.floatNaN, - floatTime: timestamp.floatTime, - int: int.int, - intHex: int.intHex, - intOct: int.intOct, - intTime: timestamp.intTime, - map: map.map, - null: _null.nullTag, - omap: omap.omap, - pairs: pairs.pairs, - seq: seq.seq, - set: set.set, - timestamp: timestamp.timestamp -}; -const coreKnownTags = { - 'tag:yaml.org,2002:binary': binary.binary, - 'tag:yaml.org,2002:omap': omap.omap, - 'tag:yaml.org,2002:pairs': pairs.pairs, - 'tag:yaml.org,2002:set': set.set, - 'tag:yaml.org,2002:timestamp': timestamp.timestamp -}; -function getTags(customTags, schemaName) { - let tags = schemas.get(schemaName); - if (!tags) { - if (Array.isArray(customTags)) - tags = []; - else { - const keys = Array.from(schemas.keys()) - .filter(key => key !== 'yaml11') - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); - } - } - if (Array.isArray(customTags)) { - for (const tag of customTags) - tags = tags.concat(tag); - } - else if (typeof customTags === 'function') { - tags = customTags(tags.slice()); - } - return tags.map(tag => { - if (typeof tag !== 'string') - return tag; - const tagObj = tagsByName[tag]; - if (tagObj) - return tagObj; - const keys = Object.keys(tagsByName) - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`); - }); -} - -exports.coreKnownTags = coreKnownTags; -exports.getTags = getTags; - - -/***/ }), - -/***/ 5724: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = 
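
The `getTags` helper above resolves schema names (`core`, `failsafe`, `json`, `yaml-1.1`) and the string aliases listed in `tagsByName`. A hedged sketch of how these surface as the `schema` and `customTags` options, still assuming the bundled `yaml` package:

```ts
import { parse } from 'yaml'

// Schema selection: 'core' is the YAML 1.2 default; 'yaml-1.1' restores the
// legacy boolean/int/float forms handled by the tag modules above
parse('answer: yes')                          // { answer: 'yes' }  (a string in 1.2 core)
parse('answer: yes', { schema: 'yaml-1.1' })  // { answer: true }

// Extra tags can be mixed into a schema by the names listed in `tagsByName`
parse('start: 3:25:45', { customTags: ['intTime'] }) // { start: 12345 }
```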
__nccwpck_require__(9338); -var stringifyString = __nccwpck_require__(6226); - -const binary = { - identify: value => value instanceof Uint8Array, - default: false, - tag: 'tag:yaml.org,2002:binary', - /** - * Returns a Buffer in node and an Uint8Array in browsers - * - * To use the resulting buffer as an image, you'll want to do something like: - * - * const blob = new Blob([buffer], { type: 'image/jpeg' }) - * document.querySelector('#photo').src = URL.createObjectURL(blob) - */ - resolve(src, onError) { - if (typeof Buffer === 'function') { - return Buffer.from(src, 'base64'); - } - else if (typeof atob === 'function') { - // On IE 11, atob() can't handle newlines - const str = atob(src.replace(/[\n\r]/g, '')); - const buffer = new Uint8Array(str.length); - for (let i = 0; i < str.length; ++i) - buffer[i] = str.charCodeAt(i); - return buffer; - } - else { - onError('This environment does not support reading binary tags; either Buffer or atob is required'); - return src; - } - }, - stringify({ comment, type, value }, ctx, onComment, onChompKeep) { - const buf = value; // checked earlier by binary.identify() - let str; - if (typeof Buffer === 'function') { - str = - buf instanceof Buffer - ? buf.toString('base64') - : Buffer.from(buf.buffer).toString('base64'); - } - else if (typeof btoa === 'function') { - let s = ''; - for (let i = 0; i < buf.length; ++i) - s += String.fromCharCode(buf[i]); - str = btoa(s); - } - else { - throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); - } - if (!type) - type = Scalar.Scalar.BLOCK_LITERAL; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); - const n = Math.ceil(str.length / lineWidth); - const lines = new Array(n); - for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { - lines[i] = str.substr(o, lineWidth); - } - str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\n' : ' '); - } - return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); - } -}; - -exports.binary = binary; - - -/***/ }), - -/***/ 2631: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(9338); - -function boolStringify({ value, source }, ctx) { - const boolObj = value ? trueTag : falseTag; - if (source && boolObj.test.test(source)) - return source; - return value ? ctx.options.trueStr : ctx.options.falseStr; -} -const trueTag = { - identify: value => value === true, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, - resolve: () => new Scalar.Scalar(true), - stringify: boolStringify -}; -const falseTag = { - identify: value => value === false, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i, - resolve: () => new Scalar.Scalar(false), - stringify: boolStringify -}; - -exports.falseTag = falseTag; -exports.trueTag = trueTag; - - -/***/ }), - -/***/ 8035: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(9338); -var stringifyNumber = __nccwpck_require__(4174); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/, - resolve: (str) => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? 
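
The `!!binary` tag above resolves to a `Buffer` in Node and a `Uint8Array` in browsers, per its own comment. A hedged round-trip sketch, assuming the bundled `yaml` package (the parse side relies on `coreKnownTags` resolving `!!binary` by default):

```ts
import { parse, stringify } from 'yaml'

// '!!binary' scalars decode from base64; in Node the result is a Buffer
const buf = parse('!!binary "SGVsbG8sIFlBTUwh"')
console.log(buf.toString()) // 'Hello, YAML!'

// To emit binary data, add the tag to the schema by name (see tagsByName above);
// the output is a base64-encoded block scalar tagged !!binary
console.log(stringify(new Uint8Array([1, 2, 3]), { customTags: ['binary'] }))
```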
Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, - resolve: (str) => parseFloat(str.replace(/_/g, '')), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, ''))); - const dot = str.indexOf('.'); - if (dot !== -1) { - const f = str.substring(dot + 1).replace(/_/g, ''); - if (f[f.length - 1] === '0') - node.minFractionDigits = f.length; - } - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; - - -/***/ }), - -/***/ 9503: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(4174); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -function intResolve(str, offset, radix, { intAsBigInt }) { - const sign = str[0]; - if (sign === '-' || sign === '+') - offset += 1; - str = str.substring(offset).replace(/_/g, ''); - if (intAsBigInt) { - switch (radix) { - case 2: - str = `0b${str}`; - break; - case 8: - str = `0o${str}`; - break; - case 16: - str = `0x${str}`; - break; - } - const n = BigInt(str); - return sign === '-' ? BigInt(-1) * n : n; - } - const n = parseInt(str, radix); - return sign === '-' ? -1 * n : n; -} -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value)) { - const str = value.toString(radix); - return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str; - } - return stringifyNumber.stringifyNumber(node); -} -const intBin = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'BIN', - test: /^[-+]?0b[0-1_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), - stringify: node => intStringify(node, 2, '0b') -}; -const intOct = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^[-+]?0[0-7_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), - stringify: node => intStringify(node, 8, '0') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9][0-9_]*$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^[-+]?0x[0-9a-fA-F_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intBin = intBin; -exports.intHex = intHex; -exports.intOct = intOct; - - -/***/ }), - -/***/ 8974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var YAMLSeq = __nccwpck_require__(5161); -var toJS = __nccwpck_require__(2463); -var Node = __nccwpck_require__(1399); -var YAMLMap = __nccwpck_require__(6011); -var pairs = __nccwpck_require__(9841); - -class YAMLOMap extends YAMLSeq.YAMLSeq { - constructor() { - super(); - this.add = YAMLMap.YAMLMap.prototype.add.bind(this); - this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this); - this.get = YAMLMap.YAMLMap.prototype.get.bind(this); - this.has = YAMLMap.YAMLMap.prototype.has.bind(this); - this.set = YAMLMap.YAMLMap.prototype.set.bind(this); - this.tag = YAMLOMap.tag; - } - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. 
- */ - toJSON(_, ctx) { - if (!ctx) - return super.toJSON(_); - const map = new Map(); - if (ctx?.onCreate) - ctx.onCreate(map); - for (const pair of this.items) { - let key, value; - if (Node.isPair(pair)) { - key = toJS.toJS(pair.key, '', ctx); - value = toJS.toJS(pair.value, key, ctx); - } - else { - key = toJS.toJS(pair, '', ctx); - } - if (map.has(key)) - throw new Error('Ordered maps must not include duplicate keys'); - map.set(key, value); - } - return map; - } -} -YAMLOMap.tag = 'tag:yaml.org,2002:omap'; -const omap = { - collection: 'seq', - identify: value => value instanceof Map, - nodeClass: YAMLOMap, - default: false, - tag: 'tag:yaml.org,2002:omap', - resolve(seq, onError) { - const pairs$1 = pairs.resolvePairs(seq, onError); - const seenKeys = []; - for (const { key } of pairs$1.items) { - if (Node.isScalar(key)) { - if (seenKeys.includes(key.value)) { - onError(`Ordered maps must not include duplicate keys: ${key.value}`); - } - else { - seenKeys.push(key.value); - } - } - } - return Object.assign(new YAMLOMap(), pairs$1); - }, - createNode(schema, iterable, ctx) { - const pairs$1 = pairs.createPairs(schema, iterable, ctx); - const omap = new YAMLOMap(); - omap.items = pairs$1.items; - return omap; - } -}; - -exports.YAMLOMap = YAMLOMap; -exports.omap = omap; - - -/***/ }), - -/***/ 9841: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var Pair = __nccwpck_require__(246); -var Scalar = __nccwpck_require__(9338); -var YAMLSeq = __nccwpck_require__(5161); - -function resolvePairs(seq, onError) { - if (Node.isSeq(seq)) { - for (let i = 0; i < seq.items.length; ++i) { - let item = seq.items[i]; - if (Node.isPair(item)) - continue; - else if (Node.isMap(item)) { - if (item.items.length > 1) - onError('Each pair must have its own sequence indicator'); - const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null)); - if (item.commentBefore) - pair.key.commentBefore = pair.key.commentBefore - ? `${item.commentBefore}\n${pair.key.commentBefore}` - : item.commentBefore; - if (item.comment) { - const cn = pair.value ?? pair.key; - cn.comment = cn.comment - ? `${item.comment}\n${cn.comment}` - : item.comment; - } - item = pair; - } - seq.items[i] = Node.isPair(item) ? 
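
The `YAMLOMap` class above implements the YAML 1.1 `!!omap` tag; as its comment notes, `toJSON` produces a `Map` when a context is given, i.e. when a whole document is converted. A hedged sketch, assuming the bundled `yaml` package:

```ts
import { parse } from 'yaml'

// Ordered map: duplicate keys are rejected, and the JS result is a Map
const m = parse('!!omap [ first: 1, second: 2 ]', { schema: 'yaml-1.1' })
console.log(m instanceof Map, [...m.keys()]) // true [ 'first', 'second' ]
```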
item : new Pair.Pair(item); - } - } - else - onError('Expected a sequence for this tag'); - return seq; -} -function createPairs(schema, iterable, ctx) { - const { replacer } = ctx; - const pairs = new YAMLSeq.YAMLSeq(schema); - pairs.tag = 'tag:yaml.org,2002:pairs'; - let i = 0; - if (iterable && Symbol.iterator in Object(iterable)) - for (let it of iterable) { - if (typeof replacer === 'function') - it = replacer.call(iterable, String(i++), it); - let key, value; - if (Array.isArray(it)) { - if (it.length === 2) { - key = it[0]; - value = it[1]; - } - else - throw new TypeError(`Expected [key, value] tuple: ${it}`); - } - else if (it && it instanceof Object) { - const keys = Object.keys(it); - if (keys.length === 1) { - key = keys[0]; - value = it[key]; - } - else - throw new TypeError(`Expected { key: value } tuple: ${it}`); - } - else { - key = it; - } - pairs.items.push(Pair.createPair(key, value, ctx)); - } - return pairs; -} -const pairs = { - collection: 'seq', - default: false, - tag: 'tag:yaml.org,2002:pairs', - resolve: resolvePairs, - createNode: createPairs -}; - -exports.createPairs = createPairs; -exports.pairs = pairs; -exports.resolvePairs = resolvePairs; - - -/***/ }), - -/***/ 5389: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var map = __nccwpck_require__(83); -var _null = __nccwpck_require__(6703); -var seq = __nccwpck_require__(1693); -var string = __nccwpck_require__(2201); -var binary = __nccwpck_require__(5724); -var bool = __nccwpck_require__(2631); -var float = __nccwpck_require__(8035); -var int = __nccwpck_require__(9503); -var omap = __nccwpck_require__(8974); -var pairs = __nccwpck_require__(9841); -var set = __nccwpck_require__(7847); -var timestamp = __nccwpck_require__(1156); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.trueTag, - bool.falseTag, - int.intBin, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float, - binary.binary, - omap.omap, - pairs.pairs, - set.set, - timestamp.intTime, - timestamp.floatTime, - timestamp.timestamp -]; - -exports.schema = schema; - - -/***/ }), - -/***/ 7847: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var Pair = __nccwpck_require__(246); -var YAMLMap = __nccwpck_require__(6011); - -class YAMLSet extends YAMLMap.YAMLMap { - constructor(schema) { - super(schema); - this.tag = YAMLSet.tag; - } - add(key) { - let pair; - if (Node.isPair(key)) - pair = key; - else if (typeof key === 'object' && - 'key' in key && - 'value' in key && - key.value === null) - pair = new Pair.Pair(key.key, null); - else - pair = new Pair.Pair(key, null); - const prev = YAMLMap.findPair(this.items, pair.key); - if (!prev) - this.items.push(pair); - } - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key, keepPair) { - const pair = YAMLMap.findPair(this.items, key); - return !keepPair && Node.isPair(pair) - ? Node.isScalar(pair.key) - ? 
pair.key.value - : pair.key - : pair; - } - set(key, value) { - if (typeof value !== 'boolean') - throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); - const prev = YAMLMap.findPair(this.items, key); - if (prev && !value) { - this.items.splice(this.items.indexOf(prev), 1); - } - else if (!prev && value) { - this.items.push(new Pair.Pair(key)); - } - } - toJSON(_, ctx) { - return super.toJSON(_, ctx, Set); - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - if (this.hasAllNullValues(true)) - return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); - else - throw new Error('Set items must all have null values'); - } -} -YAMLSet.tag = 'tag:yaml.org,2002:set'; -const set = { - collection: 'map', - identify: value => value instanceof Set, - nodeClass: YAMLSet, - default: false, - tag: 'tag:yaml.org,2002:set', - resolve(map, onError) { - if (Node.isMap(map)) { - if (map.hasAllNullValues(true)) - return Object.assign(new YAMLSet(), map); - else - onError('Set items must all have null values'); - } - else - onError('Expected a mapping for this tag'); - return map; - }, - createNode(schema, iterable, ctx) { - const { replacer } = ctx; - const set = new YAMLSet(schema); - if (iterable && Symbol.iterator in Object(iterable)) - for (let value of iterable) { - if (typeof replacer === 'function') - value = replacer.call(iterable, value, value); - set.items.push(Pair.createPair(value, null, ctx)); - } - return set; - } -}; - -exports.YAMLSet = YAMLSet; -exports.set = set; - - -/***/ }), - -/***/ 1156: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var stringifyNumber = __nccwpck_require__(4174); - -/** Internal types handle bigint as number, because TS can't figure it out. */ -function parseSexagesimal(str, asBigInt) { - const sign = str[0]; - const parts = sign === '-' || sign === '+' ? str.substring(1) : str; - const num = (n) => asBigInt ? BigInt(n) : Number(n); - const res = parts - .replace(/_/g, '') - .split(':') - .reduce((res, p) => res * num(60) + num(p), num(0)); - return (sign === '-' ? num(-1) * res : res); -} -/** - * hhhh:mm:ss.sss - * - * Internal types handle bigint as number, because TS can't figure it out. - */ -function stringifySexagesimal(node) { - let { value } = node; - let num = (n) => n; - if (typeof value === 'bigint') - num = n => BigInt(n); - else if (isNaN(value) || !isFinite(value)) - return stringifyNumber.stringifyNumber(node); - let sign = ''; - if (value < 0) { - sign = '-'; - value *= num(-1); - } - const _60 = num(60); - const parts = [value % _60]; // seconds, including ms - if (value < 60) { - parts.unshift(0); // at least one : is required - } - else { - value = (value - parts[0]) / _60; - parts.unshift(value % _60); // minutes - if (value >= 60) { - value = (value - parts[0]) / _60; - parts.unshift(value); // hours - } - } - return (sign + - parts - .map(n => (n < 10 ? 
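
The `YAMLSet` class above backs the YAML 1.1 `!!set` tag, where every entry is a key with a null value, and the sexagesimal/timestamp tags that follow are likewise 1.1 forms. A hedged sketch of how they appear when parsing with that schema (assuming the bundled `yaml` package):

```ts
import { parse } from 'yaml'

// !!set entries are keys with null values; toJSON() turns them into a JS Set
const s = parse('!!set { low, moderate, high }', { schema: 'yaml-1.1' })
console.log(s instanceof Set, s.has('high')) // true true

// !!timestamp resolves to a Date; a missing zone means UTC per the comment below
console.log(parse('2001-12-14 21:59:43 -5', { schema: 'yaml-1.1' }))
// -> Date for 2001-12-15T02:59:43.000Z
```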
'0' + String(n) : String(n))) - .join(':') - .replace(/000000\d*$/, '') // % 60 may introduce error - ); -} -const intTime = { - identify: value => typeof value === 'bigint' || Number.isInteger(value), - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, - resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), - stringify: stringifySexagesimal -}; -const floatTime = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, - resolve: str => parseSexagesimal(str, false), - stringify: stringifySexagesimal -}; -const timestamp = { - identify: value => value instanceof Date, - default: true, - tag: 'tag:yaml.org,2002:timestamp', - // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part - // may be omitted altogether, resulting in a date format. In such a case, the time part is - // assumed to be 00:00:00Z (start of day, UTC). - test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd - '(?:' + // time is optional - '(?:t|T|[ \\t]+)' + // t | T | whitespace - '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? - '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 - ')?$'), - resolve(str) { - const match = str.match(timestamp.test); - if (!match) - throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); - const [, year, month, day, hour, minute, second] = match.map(Number); - const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0; - let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); - const tz = match[8]; - if (tz && tz !== 'Z') { - let d = parseSexagesimal(tz, false); - if (Math.abs(d) < 30) - d *= 60; - date -= 60000 * d; - } - return new Date(date); - }, - stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') -}; - -exports.floatTime = floatTime; -exports.intTime = intTime; -exports.timestamp = timestamp; - - -/***/ }), - -/***/ 2889: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -const FOLD_FLOW = 'flow'; -const FOLD_BLOCK = 'block'; -const FOLD_QUOTED = 'quoted'; -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. 
- */ -function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { - if (!lineWidth || lineWidth < 0) - return text; - const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); - if (text.length <= endStep) - return text; - const folds = []; - const escapedFolds = {}; - let end = lineWidth - indent.length; - if (typeof indentAtStart === 'number') { - if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) - folds.push(0); - else - end = lineWidth - indentAtStart; - } - let split = undefined; - let prev = undefined; - let overflow = false; - let i = -1; - let escStart = -1; - let escEnd = -1; - if (mode === FOLD_BLOCK) { - i = consumeMoreIndentedLines(text, i); - if (i !== -1) - end = i + endStep; - } - for (let ch; (ch = text[(i += 1)]);) { - if (mode === FOLD_QUOTED && ch === '\\') { - escStart = i; - switch (text[i + 1]) { - case 'x': - i += 3; - break; - case 'u': - i += 5; - break; - case 'U': - i += 9; - break; - default: - i += 1; - } - escEnd = i; - } - if (ch === '\n') { - if (mode === FOLD_BLOCK) - i = consumeMoreIndentedLines(text, i); - end = i + endStep; - split = undefined; - } - else { - if (ch === ' ' && - prev && - prev !== ' ' && - prev !== '\n' && - prev !== '\t') { - // space surrounded by non-space can be replaced with newline + indent - const next = text[i + 1]; - if (next && next !== ' ' && next !== '\n' && next !== '\t') - split = i; - } - if (i >= end) { - if (split) { - folds.push(split); - end = split + endStep; - split = undefined; - } - else if (mode === FOLD_QUOTED) { - // white-space collected at end may stretch past lineWidth - while (prev === ' ' || prev === '\t') { - prev = ch; - ch = text[(i += 1)]; - overflow = true; - } - // Account for newline escape, but don't break preceding escape - const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; - // Bail out if lineWidth & minContentWidth are shorter than an escape string - if (escapedFolds[j]) - return text; - folds.push(j); - escapedFolds[j] = true; - end = j + endStep; - split = undefined; - } - else { - overflow = true; - } - } - } - prev = ch; - } - if (overflow && onOverflow) - onOverflow(); - if (folds.length === 0) - return text; - if (onFold) - onFold(); - let res = text.slice(0, folds[0]); - for (let i = 0; i < folds.length; ++i) { - const fold = folds[i]; - const end = folds[i + 1] || text.length; - if (fold === 0) - res = `\n${indent}${text.slice(0, end)}`; - else { - if (mode === FOLD_QUOTED && escapedFolds[fold]) - res += `${text[fold]}\\`; - res += `\n${indent}${text.slice(fold + 1, end)}`; - } - } - return res; -} -/** - * Presumes `i + 1` is at the start of a line - * @returns index of last newline in more-indented block - */ -function consumeMoreIndentedLines(text, i) { - let ch = text[i + 1]; - while (ch === ' ' || ch === '\t') { - do { - ch = text[(i += 1)]; - } while (ch && ch !== '\n'); - ch = text[i + 1]; - } - return i; -} - -exports.FOLD_BLOCK = FOLD_BLOCK; -exports.FOLD_FLOW = FOLD_FLOW; -exports.FOLD_QUOTED = FOLD_QUOTED; -exports.foldFlowLines = foldFlowLines; - - -/***/ }), - -/***/ 8409: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var anchors = __nccwpck_require__(8459); -var Node = __nccwpck_require__(1399); -var stringifyComment = __nccwpck_require__(5182); -var stringifyString = __nccwpck_require__(6226); - -function createStringifyContext(doc, options) { - const opt = Object.assign({ - blockQuote: true, - commentString: stringifyComment.stringifyComment, - defaultKeyType: null, - defaultStringType: 'PLAIN', - directives: null, - doubleQuotedAsJSON: false, - doubleQuotedMinMultiLineLength: 40, - falseStr: 'false', - indentSeq: true, - lineWidth: 80, - minContentWidth: 20, - nullStr: 'null', - simpleKeys: false, - singleQuote: null, - trueStr: 'true', - verifyAliasOrder: true - }, doc.schema.toStringOptions, options); - let inFlow; - switch (opt.collectionStyle) { - case 'block': - inFlow = false; - break; - case 'flow': - inFlow = true; - break; - default: - inFlow = null; - } - return { - anchors: new Set(), - doc, - indent: '', - indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', - inFlow, - options: opt - }; -} -function getTagObject(tags, item) { - if (item.tag) { - const match = tags.filter(t => t.tag === item.tag); - if (match.length > 0) - return match.find(t => t.format === item.format) ?? match[0]; - } - let tagObj = undefined; - let obj; - if (Node.isScalar(item)) { - obj = item.value; - const match = tags.filter(t => t.identify?.(obj)); - tagObj = - match.find(t => t.format === item.format) ?? match.find(t => !t.format); - } - else { - obj = item; - tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); - } - if (!tagObj) { - const name = obj?.constructor?.name ?? typeof obj; - throw new Error(`Tag not resolved for ${name} value`); - } - return tagObj; -} -// needs to be called before value stringifier to allow for circular anchor refs -function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) { - if (!doc.directives) - return ''; - const props = []; - const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor; - if (anchor && anchors.anchorIsValid(anchor)) { - anchors$1.add(anchor); - props.push(`&${anchor}`); - } - const tag = node.tag ? node.tag : tagObj.default ? 
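
`foldFlowLines` above implements line folding for the stringifier; its `lineWidth` and `minContentWidth` parameters correspond to the options of the same names defaulted in `createStringifyContext` below (80 and 20). A hedged sketch of the user-visible effect, assuming the bundled `yaml` package:

```ts
import { stringify } from 'yaml'

// Long flow scalars are folded at roughly lineWidth columns (default 80)
const text = 'word '.repeat(30).trim()
console.log(stringify({ text }, { lineWidth: 40 }))
// -> the scalar is wrapped onto several indented lines at ~40 columns
```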
null : tagObj.tag; - if (tag) - props.push(doc.directives.tagString(tag)); - return props.join(' '); -} -function stringify(item, ctx, onComment, onChompKeep) { - if (Node.isPair(item)) - return item.toString(ctx, onComment, onChompKeep); - if (Node.isAlias(item)) { - if (ctx.doc.directives) - return item.toString(ctx); - if (ctx.resolvedAliases?.has(item)) { - throw new TypeError(`Cannot stringify circular structure without alias nodes`); - } - else { - if (ctx.resolvedAliases) - ctx.resolvedAliases.add(item); - else - ctx.resolvedAliases = new Set([item]); - item = item.resolve(ctx.doc); - } - } - let tagObj = undefined; - const node = Node.isNode(item) - ? item - : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); - if (!tagObj) - tagObj = getTagObject(ctx.doc.schema.tags, node); - const props = stringifyProps(node, tagObj, ctx); - if (props.length > 0) - ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; - const str = typeof tagObj.stringify === 'function' - ? tagObj.stringify(node, ctx, onComment, onChompKeep) - : Node.isScalar(node) - ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep) - : node.toString(ctx, onComment, onChompKeep); - if (!props) - return str; - return Node.isScalar(node) || str[0] === '{' || str[0] === '[' - ? `${props} ${str}` - : `${props}\n${ctx.indent}${str}`; -} - -exports.createStringifyContext = createStringifyContext; -exports.stringify = stringify; - - -/***/ }), - -/***/ 2466: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Collection = __nccwpck_require__(3466); -var Node = __nccwpck_require__(1399); -var stringify = __nccwpck_require__(8409); -var stringifyComment = __nccwpck_require__(5182); - -function stringifyCollection(collection, ctx, options) { - const flow = ctx.inFlow ?? collection.flow; - const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; - return stringify(collection, ctx, options); -} -function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { - const { indent, options: { commentString } } = ctx; - const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); - let chompKeep = false; // flag for the preceding node's status - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (Node.isNode(item)) { - if (!chompKeep && item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, chompKeep); - if (item.comment) - comment = item.comment; - } - else if (Node.isPair(item)) { - const ik = Node.isNode(item.key) ? item.key : null; - if (ik) { - if (!chompKeep && ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); - } - } - chompKeep = false; - let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (chompKeep && comment) - chompKeep = false; - lines.push(blockItemPrefix + str); - } - let str; - if (lines.length === 0) { - str = flowChars.start + flowChars.end; - } - else { - str = lines[0]; - for (let i = 1; i < lines.length; ++i) { - const line = lines[i]; - str += line ? 
`\n${indent}${line}` : '\n'; - } - } - if (comment) { - str += '\n' + stringifyComment.indentComment(commentString(comment), indent); - if (onComment) - onComment(); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; -} -function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) { - const { indent, indentStep, options: { commentString } } = ctx; - itemIndent += indentStep; - const itemCtx = Object.assign({}, ctx, { - indent: itemIndent, - inFlow: true, - type: null - }); - let reqNewline = false; - let linesAtValue = 0; - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (Node.isNode(item)) { - if (item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, false); - if (item.comment) - comment = item.comment; - } - else if (Node.isPair(item)) { - const ik = Node.isNode(item.key) ? item.key : null; - if (ik) { - if (ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, false); - if (ik.comment) - reqNewline = true; - } - const iv = Node.isNode(item.value) ? item.value : null; - if (iv) { - if (iv.comment) - comment = iv.comment; - if (iv.commentBefore) - reqNewline = true; - } - else if (item.value == null && ik && ik.comment) { - comment = ik.comment; - } - } - if (comment) - reqNewline = true; - let str = stringify.stringify(item, itemCtx, () => (comment = null)); - if (i < items.length - 1) - str += ','; - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) - reqNewline = true; - lines.push(str); - linesAtValue = lines.length; - } - let str; - const { start, end } = flowChars; - if (lines.length === 0) { - str = start + end; - } - else { - if (!reqNewline) { - const len = lines.reduce((sum, line) => sum + line.length + 2, 2); - reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength; - } - if (reqNewline) { - str = start; - for (const line of lines) - str += line ? `\n${indentStep}${indent}${line}` : '\n'; - str += `\n${indent}${end}`; - } - else { - str = `${start} ${lines.join(' ')} ${end}`; - } - } - if (comment) { - str += stringifyComment.lineComment(str, commentString(comment), indent); - if (onComment) - onComment(); - } - return str; -} -function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { - if (comment && chompKeep) - comment = comment.replace(/^\n+/, ''); - if (comment) { - const ic = stringifyComment.indentComment(commentString(comment), indent); - lines.push(ic.trimStart()); // Avoid double indent on first line - } -} - -exports.stringifyCollection = stringifyCollection; - - -/***/ }), - -/***/ 5182: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -/** - * Stringifies a comment. - * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ -const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); -function indentComment(comment, indent) { - if (/^\n+$/.test(comment)) - return comment.substring(1); - return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; -} -const lineComment = (str, indent, comment) => str.endsWith('\n') - ? indentComment(comment, indent) - : comment.includes('\n') - ? '\n' + indentComment(comment, indent) - : (str.endsWith(' ') ? 
'' : ' ') + comment; - -exports.indentComment = indentComment; -exports.lineComment = lineComment; -exports.stringifyComment = stringifyComment; - - -/***/ }), - -/***/ 5225: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var stringify = __nccwpck_require__(8409); -var stringifyComment = __nccwpck_require__(5182); - -function stringifyDocument(doc, options) { - const lines = []; - let hasDirectives = options.directives === true; - if (options.directives !== false && doc.directives) { - const dir = doc.directives.toString(doc); - if (dir) { - lines.push(dir); - hasDirectives = true; - } - else if (doc.directives.docStart) - hasDirectives = true; - } - if (hasDirectives) - lines.push('---'); - const ctx = stringify.createStringifyContext(doc, options); - const { commentString } = ctx.options; - if (doc.commentBefore) { - if (lines.length !== 1) - lines.unshift(''); - const cs = commentString(doc.commentBefore); - lines.unshift(stringifyComment.indentComment(cs, '')); - } - let chompKeep = false; - let contentComment = null; - if (doc.contents) { - if (Node.isNode(doc.contents)) { - if (doc.contents.spaceBefore && hasDirectives) - lines.push(''); - if (doc.contents.commentBefore) { - const cs = commentString(doc.contents.commentBefore); - lines.push(stringifyComment.indentComment(cs, '')); - } - // top-level block scalars need to be indented if followed by a comment - ctx.forceBlockIndent = !!doc.comment; - contentComment = doc.contents.comment; - } - const onChompKeep = contentComment ? undefined : () => (chompKeep = true); - let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); - if (contentComment) - body += stringifyComment.lineComment(body, '', commentString(contentComment)); - if ((body[0] === '|' || body[0] === '>') && - lines[lines.length - 1] === '---') { - // Top-level block scalars with a preceding doc marker ought to use the - // same line for their header. - lines[lines.length - 1] = `--- ${body}`; - } - else - lines.push(body); - } - else { - lines.push(stringify.stringify(doc.contents, ctx)); - } - if (doc.directives?.docEnd) { - if (doc.comment) { - const cs = commentString(doc.comment); - if (cs.includes('\n')) { - lines.push('...'); - lines.push(stringifyComment.indentComment(cs, '')); - } - else { - lines.push(`... ${cs}`); - } - } - else { - lines.push('...'); - } - } - else { - let dc = doc.comment; - if (dc && chompKeep) - dc = dc.replace(/^\n+/, ''); - if (dc) { - if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') - lines.push(''); - lines.push(stringifyComment.indentComment(commentString(dc), '')); - } - } - return lines.join('\n') + '\n'; -} - -exports.stringifyDocument = stringifyDocument; - - -/***/ }), - -/***/ 4174: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -function stringifyNumber({ format, minFractionDigits, tag, value }) { - if (typeof value === 'bigint') - return String(value); - const num = typeof value === 'number' ? value : Number(value); - if (!isFinite(num)) - return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf'; - let n = JSON.stringify(value); - if (!format && - minFractionDigits && - (!tag || tag === 'tag:yaml.org,2002:float') && - /^\d/.test(n)) { - let i = n.indexOf('.'); - if (i < 0) { - i = n.length; - n += '.'; - } - let d = minFractionDigits - (n.length - i - 1); - while (d-- > 0) - n += '0'; - } - return n; -} - -exports.stringifyNumber = stringifyNumber; - - -/***/ }), - -/***/ 4875: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); -var Scalar = __nccwpck_require__(9338); -var stringify = __nccwpck_require__(8409); -var stringifyComment = __nccwpck_require__(5182); - -function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { - const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; - let keyComment = (Node.isNode(key) && key.comment) || null; - if (simpleKeys) { - if (keyComment) { - throw new Error('With simple keys, key nodes cannot have comments'); - } - if (Node.isCollection(key)) { - const msg = 'With simple keys, collection cannot be used as a key value'; - throw new Error(msg); - } - } - let explicitKey = !simpleKeys && - (!key || - (keyComment && value == null && !ctx.inFlow) || - Node.isCollection(key) || - (Node.isScalar(key) - ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL - : typeof key === 'object')); - ctx = Object.assign({}, ctx, { - allNullValues: false, - implicitKey: !explicitKey && (simpleKeys || !allNullValues), - indent: indent + indentStep - }); - let keyCommentDone = false; - let chompKeep = false; - let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); - if (!explicitKey && !ctx.inFlow && str.length > 1024) { - if (simpleKeys) - throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); - explicitKey = true; - } - if (ctx.inFlow) { - if (allNullValues || value == null) { - if (keyCommentDone && onComment) - onComment(); - return str === '' ? '?' : explicitKey ? `? ${str}` : str; - } - } - else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { - str = `? ${str}`; - if (keyComment && !keyCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; - } - if (keyCommentDone) - keyComment = null; - if (explicitKey) { - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - str = `? 
${str}\n${indent}:`; - } - else { - str = `${str}:`; - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - let vcb = ''; - let valueComment = null; - if (Node.isNode(value)) { - if (value.spaceBefore) - vcb = '\n'; - if (value.commentBefore) { - const cs = commentString(value.commentBefore); - vcb += `\n${stringifyComment.indentComment(cs, ctx.indent)}`; - } - valueComment = value.comment; - } - else if (value && typeof value === 'object') { - value = doc.createNode(value); - } - ctx.implicitKey = false; - if (!explicitKey && !keyComment && Node.isScalar(value)) - ctx.indentAtStart = str.length + 1; - chompKeep = false; - if (!indentSeq && - indentStep.length >= 2 && - !ctx.inFlow && - !explicitKey && - Node.isSeq(value) && - !value.flow && - !value.tag && - !value.anchor) { - // If indentSeq === false, consider '- ' as part of indentation where possible - ctx.indent = ctx.indent.substr(2); - } - let valueCommentDone = false; - const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); - let ws = ' '; - if (vcb || keyComment) { - if (valueStr === '' && !ctx.inFlow) - ws = vcb === '\n' ? '\n\n' : vcb; - else - ws = `${vcb}\n${ctx.indent}`; - } - else if (!explicitKey && Node.isCollection(value)) { - const flow = valueStr[0] === '[' || valueStr[0] === '{'; - if (!flow || valueStr.includes('\n')) - ws = `\n${ctx.indent}`; - } - else if (valueStr === '' || valueStr[0] === '\n') - ws = ''; - str += ws + valueStr; - if (ctx.inFlow) { - if (valueCommentDone && onComment) - onComment(); - } - else if (valueComment && !valueCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment)); - } - else if (chompKeep && onChompKeep) { - onChompKeep(); - } - return str; -} - -exports.stringifyPair = stringifyPair; - - -/***/ }), - -/***/ 6226: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Scalar = __nccwpck_require__(9338); -var foldFlowLines = __nccwpck_require__(2889); - -const getFoldOptions = (ctx) => ({ - indentAtStart: ctx.indentAtStart, - lineWidth: ctx.options.lineWidth, - minContentWidth: ctx.options.minContentWidth -}); -// Also checks for lines starting with %, as parsing the output as YAML 1.1 will -// presume that's starting a new document. -const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); -function lineLengthOverLimit(str, lineWidth, indentLength) { - if (!lineWidth || lineWidth < 0) - return false; - const limit = lineWidth - indentLength; - const strLen = str.length; - if (strLen <= limit) - return false; - for (let i = 0, start = 0; i < strLen; ++i) { - if (str[i] === '\n') { - if (i - start > limit) - return true; - start = i + 1; - if (strLen - start <= limit) - return false; - } - } - return true; -} -function doubleQuotedString(value, ctx) { - const json = JSON.stringify(value); - if (ctx.options.doubleQuotedAsJSON) - return json; - const { implicitKey } = ctx; - const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; - const indent = ctx.indent || (containsDocumentMarker(value) ? 
' ' : ''); - let str = ''; - let start = 0; - for (let i = 0, ch = json[i]; ch; ch = json[++i]) { - if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { - // space before newline needs to be escaped to not be folded - str += json.slice(start, i) + '\\ '; - i += 1; - start = i; - ch = '\\'; - } - if (ch === '\\') - switch (json[i + 1]) { - case 'u': - { - str += json.slice(start, i); - const code = json.substr(i + 2, 4); - switch (code) { - case '0000': - str += '\\0'; - break; - case '0007': - str += '\\a'; - break; - case '000b': - str += '\\v'; - break; - case '001b': - str += '\\e'; - break; - case '0085': - str += '\\N'; - break; - case '00a0': - str += '\\_'; - break; - case '2028': - str += '\\L'; - break; - case '2029': - str += '\\P'; - break; - default: - if (code.substr(0, 2) === '00') - str += '\\x' + code.substr(2); - else - str += json.substr(i, 6); - } - i += 5; - start = i + 1; - } - break; - case 'n': - if (implicitKey || - json[i + 2] === '"' || - json.length < minMultiLineLength) { - i += 1; - } - else { - // folding will eat first newline - str += json.slice(start, i) + '\n\n'; - while (json[i + 2] === '\\' && - json[i + 3] === 'n' && - json[i + 4] !== '"') { - str += '\n'; - i += 2; - } - str += indent; - // space after newline needs to be escaped to not be folded - if (json[i + 2] === ' ') - str += '\\'; - i += 1; - start = i + 1; - } - break; - default: - i += 1; - } - } - str = start ? str + json.slice(start) : json; - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx)); -} -function singleQuotedString(value, ctx) { - if (ctx.options.singleQuote === false || - (ctx.implicitKey && value.includes('\n')) || - /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline - ) - return doubleQuotedString(value, ctx); - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; - return ctx.implicitKey - ? res - : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx)); -} -function quotedString(value, ctx) { - const { singleQuote } = ctx.options; - let qs; - if (singleQuote === false) - qs = doubleQuotedString; - else { - const hasDouble = value.includes('"'); - const hasSingle = value.includes("'"); - if (hasDouble && !hasSingle) - qs = singleQuotedString; - else if (hasSingle && !hasDouble) - qs = doubleQuotedString; - else - qs = singleQuote ? singleQuotedString : doubleQuotedString; - } - return qs(value, ctx); -} -function blockString({ comment, type, value }, ctx, onComment, onChompKeep) { - const { blockQuote, commentString, lineWidth } = ctx.options; - // 1. Block can't end in whitespace unless the last line is non-empty. - // 2. Strings consisting of only whitespace are best rendered explicitly. - if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { - return quotedString(value, ctx); - } - const indent = ctx.indent || - (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : ''); - const literal = blockQuote === 'literal' - ? true - : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED - ? false - : type === Scalar.Scalar.BLOCK_LITERAL - ? true - : !lineLengthOverLimit(value, lineWidth, indent.length); - if (!value) - return literal ? 
'|\n' : '>\n'; - // determine chomping from whitespace at value end - let chomp; - let endStart; - for (endStart = value.length; endStart > 0; --endStart) { - const ch = value[endStart - 1]; - if (ch !== '\n' && ch !== '\t' && ch !== ' ') - break; - } - let end = value.substring(endStart); - const endNlPos = end.indexOf('\n'); - if (endNlPos === -1) { - chomp = '-'; // strip - } - else if (value === end || endNlPos !== end.length - 1) { - chomp = '+'; // keep - if (onChompKeep) - onChompKeep(); - } - else { - chomp = ''; // clip - } - if (end) { - value = value.slice(0, -end.length); - if (end[end.length - 1] === '\n') - end = end.slice(0, -1); - end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`); - } - // determine indent indicator from whitespace at value start - let startWithSpace = false; - let startEnd; - let startNlPos = -1; - for (startEnd = 0; startEnd < value.length; ++startEnd) { - const ch = value[startEnd]; - if (ch === ' ') - startWithSpace = true; - else if (ch === '\n') - startNlPos = startEnd; - else - break; - } - let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); - if (start) { - value = value.substring(start.length); - start = start.replace(/\n+/g, `$&${indent}`); - } - const indentSize = indent ? '2' : '1'; // root is at -1 - let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp; - if (comment) { - header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); - if (onComment) - onComment(); - } - if (literal) { - value = value.replace(/\n+/g, `$&${indent}`); - return `${header}\n${indent}${start}${value}${end}`; - } - value = value - .replace(/\n+/g, '\n$&') - .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded - // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent - .replace(/\n+/g, `$&${indent}`); - const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx)); - return `${header}\n${indent}${body}`; -} -function plainString(item, ctx, onComment, onChompKeep) { - const { type, value } = item; - const { actualString, implicitKey, indent, inFlow } = ctx; - if ((implicitKey && /[\n[\]{},]/.test(value)) || - (inFlow && /[[\]{},]/.test(value))) { - return quotedString(value, ctx); - } - if (!value || - /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { - // not allowed: - // - empty string, '-' or '?' - // - start with an indicator character (except [?:-]) or /[?-] / - // - '\n ', ': ' or ' \n' anywhere - // - '#' not preceded by a non-space char - // - end with ' ' or ':' - return implicitKey || inFlow || !value.includes('\n') - ? quotedString(value, ctx) - : blockString(item, ctx, onComment, onChompKeep); - } - if (!implicitKey && - !inFlow && - type !== Scalar.Scalar.PLAIN && - value.includes('\n')) { - // Where allowed & type not set explicitly, prefer block style for multiline strings - return blockString(item, ctx, onComment, onChompKeep); - } - if (indent === '' && containsDocumentMarker(value)) { - ctx.forceBlockIndent = true; - return blockString(item, ctx, onComment, onChompKeep); - } - const str = value.replace(/\n+/g, `$&\n${indent}`); - // Verify that output will be parsed as a string, as e.g. plain numbers and - // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), - // and others in v1.1. 
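
`blockString` above chooses between literal (`|`) and folded (`>`) block scalars and derives the chomping indicator from trailing whitespace. A hedged sketch of the default behaviour for multi-line strings, assuming the bundled `yaml` package:

```ts
import { stringify } from 'yaml'

// Multi-line strings come out as block scalars; a single trailing newline
// means "clip" chomping, so no indicator follows the '|'
console.log(stringify({ msg: 'line one\nline two\n' }))
// msg: |
//   line one
//   line two
```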
- if (actualString) { - const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); - const { compat, tags } = ctx.doc.schema; - if (tags.some(test) || compat?.some(test)) - return quotedString(value, ctx); - } - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx)); -} -function stringifyString(item, ctx, onComment, onChompKeep) { - const { implicitKey, inFlow } = ctx; - const ss = typeof item.value === 'string' - ? item - : Object.assign({}, item, { value: String(item.value) }); - let { type } = item; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - // force double quotes on control characters & unpaired surrogates - if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) - type = Scalar.Scalar.QUOTE_DOUBLE; - } - const _stringify = (_type) => { - switch (_type) { - case Scalar.Scalar.BLOCK_FOLDED: - case Scalar.Scalar.BLOCK_LITERAL: - return implicitKey || inFlow - ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers - : blockString(ss, ctx, onComment, onChompKeep); - case Scalar.Scalar.QUOTE_DOUBLE: - return doubleQuotedString(ss.value, ctx); - case Scalar.Scalar.QUOTE_SINGLE: - return singleQuotedString(ss.value, ctx); - case Scalar.Scalar.PLAIN: - return plainString(ss, ctx, onComment, onChompKeep); - default: - return null; - } - }; - let res = _stringify(type); - if (res === null) { - const { defaultKeyType, defaultStringType } = ctx.options; - const t = (implicitKey && defaultKeyType) || defaultStringType; - res = _stringify(t); - if (res === null) - throw new Error(`Unsupported default string type ${t}`); - } - return res; -} - -exports.stringifyString = stringifyString; - - -/***/ }), - -/***/ 6796: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var Node = __nccwpck_require__(1399); - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove node'); -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most - * specific defined one will be used for each node. - */ -function visit(node, visitor) { - const visitor_ = initVisitor(visitor); - if (Node.isDocument(node)) { - const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - visit_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current node */ -visit.SKIP = SKIP; -/** Remove the current node */ -visit.REMOVE = REMOVE; -function visit_(key, node, visitor, path) { - const ctrl = callVisitor(key, node, visitor, path); - if (Node.isNode(ctrl) || Node.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visit_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (Node.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = visit_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (Node.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = visit_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = visit_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -/** - * Apply an async visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. 
- */ -async function visitAsync(node, visitor) { - const visitor_ = initVisitor(visitor); - if (Node.isDocument(node)) { - const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - await visitAsync_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visitAsync.BREAK = BREAK; -/** Do not visit the children of the current node */ -visitAsync.SKIP = SKIP; -/** Remove the current node */ -visitAsync.REMOVE = REMOVE; -async function visitAsync_(key, node, visitor, path) { - const ctrl = await callVisitor(key, node, visitor, path); - if (Node.isNode(ctrl) || Node.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visitAsync_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (Node.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = await visitAsync_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (Node.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = await visitAsync_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = await visitAsync_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -function initVisitor(visitor) { - if (typeof visitor === 'object' && - (visitor.Collection || visitor.Node || visitor.Value)) { - return Object.assign({ - Alias: visitor.Node, - Map: visitor.Node, - Scalar: visitor.Node, - Seq: visitor.Node - }, visitor.Value && { - Map: visitor.Value, - Scalar: visitor.Value, - Seq: visitor.Value - }, visitor.Collection && { - Map: visitor.Collection, - Seq: visitor.Collection - }, visitor); - } - return visitor; -} -function callVisitor(key, node, visitor, path) { - if (typeof visitor === 'function') - return visitor(key, node, path); - if (Node.isMap(node)) - return visitor.Map?.(key, node, path); - if (Node.isSeq(node)) - return visitor.Seq?.(key, node, path); - if (Node.isPair(node)) - return visitor.Pair?.(key, node, path); - if (Node.isScalar(node)) - return visitor.Scalar?.(key, node, path); - if (Node.isAlias(node)) - return visitor.Alias?.(key, node, path); - return undefined; -} -function replaceNode(key, path, node) { - const parent = path[path.length - 1]; - if (Node.isCollection(parent)) { - parent.items[key] = node; - } - else if (Node.isPair(parent)) { - if (key === 'key') - parent.key = node; - else - parent.value = node; - } - else if (Node.isDocument(parent)) { - parent.contents = node; - } - else { - const pt = Node.isAlias(parent) ? 
'alias' : 'scalar'; - throw new Error(`Cannot replace node with ${pt} parent`); - } -} - -exports.visit = visit; -exports.visitAsync = visitAsync; - - /***/ }), /***/ 6844: diff --git a/dist/index.js.map b/dist/index.js.map index 4a4fe852e..8350ac4b4 100644 --- a/dist/index.js.map +++ b/dist/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC16EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC
zBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;;;;;;;;;AC5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9rBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACz7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9HA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3TA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;AC3OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;ACvNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACPA;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;ACNA;AACA;;;;AEDA;AACA;AACA;AACA","sources":["../webpack://dependency-review-action/./lib/dependency-graph.js","../webpack://dependency-review-action/./lib/licenses.js","../webpack://dependency-review-action/./lib/main.js","../webpack://dependency-review-action/./lib/schemas.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/command.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/core.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/file-command.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/summary.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/utils.js","../webpack://dependency-review-action/./node_modules/@actions/github/lib/context.js","../webpack://dependency-review-action/./node_modules/@actions/github/lib/github.js","../webpack://dependency-review-action/./node_modules/@actions/github/lib/internal/utils.js","../webpack://dependency-review-action/./node_modules/@actions/github/lib/utils.js","../webpack://dependency-review-action/./node_modules/@actions/http-client/lib/auth.js","../webpack://dependency-review-action/./node_modules/@actions/http-client/lib/index.js","../webpack://dependency-review-action/./node_modules/@actions/http-client/lib/proxy.js","../webpack://dependency-review-action/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://dependency-review-actio
n/./node_modules/@octokit/core/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/plugin-retry/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/request/dist-node/index.js","../webpack://dependency-review-action/./node_modules/before-after-hook/index.js","../webpack://dependency-review-action/./node_modules/before-after-hook/lib/add.js","../webpack://dependency-review-action/./node_modules/before-after-hook/lib/register.js","../webpack://dependency-review-action/./node_modules/before-after-hook/lib/remove.js","../webpack://dependency-review-action/./node_modules/bottleneck/light.js","../webpack://dependency-review-action/./node_modules/deprecation/dist-node/index.js","../webpack://dependency-review-action/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://dependency-review-action/./node_modules/node-fetch/lib/index.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/tr46/index.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/webidl-conversions/lib/index.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/URL-impl.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/URL.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/public-api.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/url-state-machine.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/utils.js","../webpack://dependency-review-action/./node_modules/once/once.js","../webpack://dependency-review-action/./node_modules/tunnel/index.js","../webpack://dependency-review-action/./node_modules/tunnel/lib/tunnel.js","../webpack://dependency-review-action/./node_modules/universal-user-agent/dist-node/index.js","../webpack://dependency-review-action/./node_modules/wrappy/wrappy.js","../webpack://dependency-review-action/./node_modules/zod/lib/ZodError.js","../webpack://dependency-review-action/./node_modules/zod/lib/external.js","../webpack://dependency-review-action/./node_modules/zod/lib/helpers/errorUtil.js","../webpack://dependency-review-action/./node_modules/zod/lib/helpers/parseUtil.js","../webpack://dependency-review-action/./node_modules/zod/lib/helpers/typeAliases.js","../webpack://dependency-review-action/./node_modules/zod/lib/helpers/util.js","../webpack://dependency-review-action/./node_modules/zod/lib/index.js","../webpack://dependency-review-action/./node_modules/zod/lib/types.js","../webpack://dependency-review-action/./src/config.ts","../webpack://dependency-review-action/./src/filter.ts","../webpack://dependency-review-action/./src/schemas.ts","../webpack://dependency-review-action/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://dependency-review-action/external node-commonjs \"assert\"","../webpack://dependency-review-action/external node-commonjs 
\"events\"","../webpack://dependency-review-action/external node-commonjs \"fs\"","../webpack://dependency-review-action/external node-commonjs \"http\"","../webpack://dependency-review-action/external node-commonjs \"https\"","../webpack://dependency-review-action/external node-commonjs \"net\"","../webpack://dependency-review-action/external node-commonjs \"os\"","../webpack://dependency-review-action/external node-commonjs \"path\"","../webpack://dependency-review-action/external node-commonjs \"punycode\"","../webpack://dependency-review-action/external node-commonjs \"stream\"","../webpack://dependency-review-action/external node-commonjs \"tls\"","../webpack://dependency-review-action/external node-commonjs \"url\"","../webpack://dependency-review-action/external node-commonjs \"util\"","../webpack://dependency-review-action/external node-commonjs \"zlib\"","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/compose-collection.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/compose-doc.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/compose-node.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/compose-scalar.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/composer.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/resolve-block-map.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/resolve-block-scalar.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/resolve-block-seq.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/resolve-end.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/resolve-flow-collection.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/resolve-flow-scalar.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/resolve-props.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/util-contains-newline.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/util-empty-scalar-position.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/util-flow-indent-check.js","../webpack://dependency-review-action/./node_modules/yaml/dist/compose/util-map-includes.js","../webpack://dependency-review-action/./node_modules/yaml/dist/doc/Document.js","../webpack://dependency-review-action/./node_modules/yaml/dist/doc/anchors.js","../webpack://dependency-review-action/./node_modules/yaml/dist/doc/applyReviver.js","../webpack://dependency-review-action/./node_modules/yaml/dist/doc/createNode.js","../webpack://dependency-review-action/./node_modules/yaml/dist/doc/directives.js","../webpack://dependency-review-action/./node_modules/yaml/dist/errors.js","../webpack://dependency-review-action/./node_modules/yaml/dist/index.js","../webpack://dependency-review-action/./node_modules/yaml/dist/log.js","../webpack://dependency-review-action/./node_modules/yaml/dist/nodes/Alias.js","../webpack://dependency-review-action/./node_modules/yaml/dist/nodes/Collection.js","../webpack://dependency-review-action/./node_modules/yaml/dist/nodes/Node.js","../webpack://dependency-review-action/./node_modules/yaml/dist/nodes/Pair.js","../webpack://dependency-review-action/./node_modules/yaml/dist/nodes/Scalar.js","../webpack://dependency-review-action/./node_modules/yaml/dist/nodes/YAMLMap.js","../webpack://dependency-review-action/./node_modules/yaml/
dist/nodes/YAMLSeq.js","../webpack://dependency-review-action/./node_modules/yaml/dist/nodes/addPairToJSMap.js","../webpack://dependency-review-action/./node_modules/yaml/dist/nodes/toJS.js","../webpack://dependency-review-action/./node_modules/yaml/dist/parse/cst-scalar.js","../webpack://dependency-review-action/./node_modules/yaml/dist/parse/cst-stringify.js","../webpack://dependency-review-action/./node_modules/yaml/dist/parse/cst-visit.js","../webpack://dependency-review-action/./node_modules/yaml/dist/parse/cst.js","../webpack://dependency-review-action/./node_modules/yaml/dist/parse/lexer.js","../webpack://dependency-review-action/./node_modules/yaml/dist/parse/line-counter.js","../webpack://dependency-review-action/./node_modules/yaml/dist/parse/parser.js","../webpack://dependency-review-action/./node_modules/yaml/dist/public-api.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/Schema.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/common/map.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/common/null.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/common/seq.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/common/string.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/core/bool.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/core/float.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/core/int.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/core/schema.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/json/schema.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/tags.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/yaml-1.1/binary.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/yaml-1.1/bool.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/yaml-1.1/float.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/yaml-1.1/int.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/yaml-1.1/omap.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/yaml-1.1/pairs.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/yaml-1.1/schema.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/yaml-1.1/set.js","../webpack://dependency-review-action/./node_modules/yaml/dist/schema/yaml-1.1/timestamp.js","../webpack://dependency-review-action/./node_modules/yaml/dist/stringify/foldFlowLines.js","../webpack://dependency-review-action/./node_modules/yaml/dist/stringify/stringify.js","../webpack://dependency-review-action/./node_modules/yaml/dist/stringify/stringifyCollection.js","../webpack://dependency-review-action/./node_modules/yaml/dist/stringify/stringifyComment.js","../webpack://dependency-review-action/./node_modules/yaml/dist/stringify/stringifyDocument.js","../webpack://dependency-review-action/./node_modules/yaml/dist/stringify/stringifyNumber.js","../webpack://dependency-review-action/./node_modules/yaml/dist/stringify/stringifyPair.js","../webpack://dependency-review-action/./node_modules/yaml/dist/stringify/stringifyString.js","../webpack://dependency-review-action/./node_modules/yaml/dist/visit.js","../webpack://dependency-review-action/./node_modules/ansi-styles/index.js","../webpack://dependency-review-action/webpack/bootstr
ap","../webpack://dependency-review-action/webpack/runtime/define property getters","../webpack://dependency-review-action/webpack/runtime/hasOwnProperty shorthand","../webpack://dependency-review-action/webpack/runtime/make namespace object","../webpack://dependency-review-action/webpack/runtime/compat","../webpack://dependency-review-action/webpack/before-startup","../webpack://dependency-review-action/webpack/startup","../webpack://dependency-review-action/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.compare = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst githubUtils = __importStar(require(\"@actions/github/lib/utils\"));\nconst retry = __importStar(require(\"@octokit/plugin-retry\"));\nconst schemas_1 = require(\"./schemas\");\nconst retryingOctokit = githubUtils.GitHub.plugin(retry.retry);\nconst octo = new retryingOctokit(githubUtils.getOctokitOptions(core.getInput('repo-token', { required: true })));\nfunction compare({ owner, repo, baseRef, headRef }) {\n return __awaiter(this, void 0, void 0, function* () {\n const changes = yield octo.paginate('GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}', {\n owner,\n repo,\n basehead: `${baseRef}...${headRef}`\n });\n return schemas_1.ChangesSchema.parse(changes);\n });\n}\nexports.compare = compare;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getDeniedLicenseChanges = void 0;\n/**\n * Loops through a list of changes, filtering and returning the\n * ones that don't conform to the licenses allow/deny lists.\n *\n * Keep in mind that we don't let users specify both an allow and a deny\n * list in their config files, so this code works under the assumption that\n * one of the two list parameters will be empty. 
If both lists are provided,\n * we will ignore the deny list.\n * @param {Change[]} changes The list of changes to filter.\n * @param { { allow?: string[], deny?: string[]}} licenses An object with `allow`/`deny` keys, each containing a list of licenses.\n * @returns {Array 0) {\n printLicensesError(licenseErrors, licenses);\n core.setFailed('Dependency review detected prohibited licenses.');\n return;\n }\n let filteredChanges = (0, filter_1.filterChangesBySeverity)(minSeverity, changes);\n for (const change of filteredChanges) {\n if (change.change_type === 'added' &&\n change.vulnerabilities !== undefined &&\n change.vulnerabilities.length > 0) {\n printChangeVulnerabilities(change);\n failed = true;\n }\n }\n if (failed) {\n throw new Error('Dependency review detected vulnerable packages.');\n }\n else {\n core.info(`Dependency review did not detect any vulnerable packages with severity level \"${minSeverity}\" or above.`);\n }\n }\n catch (error) {\n if (error instanceof request_error_1.RequestError && error.status === 404) {\n core.setFailed(`Dependency review could not obtain dependency data for the specified owner, repository, or revision range.`);\n }\n else if (error instanceof request_error_1.RequestError && error.status === 403) {\n core.setFailed(`Dependency review is not supported on this repository. Please ensure that Dependency graph is enabled, see https://github.com/${github.context.repo.owner}/${github.context.repo.repo}/settings/security_analysis`);\n }\n else {\n if (error instanceof Error) {\n core.setFailed(error.message);\n }\n else {\n core.setFailed('Unexpected fatal error');\n }\n }\n }\n });\n}\nfunction printChangeVulnerabilities(change) {\n for (const vuln of change.vulnerabilities) {\n core.info(`${ansi_styles_1.default.bold.open}${change.manifest} » ${change.name}@${change.version}${ansi_styles_1.default.bold.close} – ${vuln.advisory_summary} ${renderSeverity(vuln.severity)}`);\n core.info(` ↪ ${vuln.advisory_url}`);\n }\n}\nfunction renderSeverity(severity) {\n const color = {\n critical: 'red',\n high: 'red',\n moderate: 'yellow',\n low: 'grey'\n }[severity];\n return `${ansi_styles_1.default.color[color].open}(${severity} severity)${ansi_styles_1.default.color[color].close}`;\n}\nfunction printLicensesError(changes, licenses) {\n if (changes.length == 0) {\n return;\n }\n let { allow = [], deny = [] } = licenses;\n core.info('Dependency review detected prohibited licenses.');\n if (allow.length > 0) {\n core.info('\\nAllowed licenses: ' + allow.join(', ') + '\\n');\n }\n if (deny.length > 0) {\n core.info('\\nDenied licenses: ' + deny.join(', ') + '\\n');\n }\n core.info('The following dependencies have incompatible licenses:\\n');\n for (const change of changes) {\n core.info(`${ansi_styles_1.default.bold.open}${change.manifest} » ${change.name}@${change.version}${ansi_styles_1.default.bold.close} – License: ${ansi_styles_1.default.color.red.open}${change.license}${ansi_styles_1.default.color.red.close}`);\n }\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChangesSchema = exports.ConfigurationOptionsSchema = exports.PullRequestSchema = exports.ChangeSchema = exports.SEVERITIES = void 0;\nconst z = __importStar(require(\"zod\"));\nexports.SEVERITIES = ['critical', 'high', 'moderate', 'low'];\nexports.ChangeSchema = z.object({\n change_type: z.enum(['added', 'removed']),\n manifest: z.string(),\n ecosystem: z.string(),\n name: z.string(),\n version: z.string(),\n package_url: z.string(),\n license: z.string().nullable(),\n source_repository_url: z.string().nullable(),\n vulnerabilities: z\n .array(z.object({\n severity: z.enum(['critical', 'high', 'moderate', 'low']),\n advisory_ghsa_id: z.string(),\n advisory_summary: z.string(),\n advisory_url: z.string()\n }))\n .optional()\n .default([])\n});\nexports.PullRequestSchema = z.object({\n number: z.number(),\n base: z.object({ sha: z.string() }),\n head: z.object({ sha: z.string() })\n});\nexports.ConfigurationOptionsSchema = z\n .object({\n fail_on_severity: z.enum(exports.SEVERITIES).default('low'),\n allow_licenses: z.array(z.string()).default([]),\n deny_licenses: z.array(z.string()).default([])\n})\n .partial()\n .refine(obj => !(obj.allow_licenses && obj.deny_licenses), \"Can't specify both allow_licenses and deny_licenses\");\nexports.ChangesSchema = z.array(exports.ChangeSchema);\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n return inputs;\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issueCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. 
Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 
'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? 
_a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.6.0\";\n\nconst _excluded = [\"authStrategy\"];\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, 
options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, _excluded);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.12\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.8.0\";\n\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\\n` + data.errors.map(e => ` - ${e.message}`).join(\"\\n\");\n}\n\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\"; // Expose the errors and response data in their shorthand properties.\n\n this.errors = response.errors;\n this.data = response.data; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.GraphqlResponseError = GraphqlResponseError;\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.17.0\";\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? 
arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return _objectSpread2(_objectSpread2({}, response), {}, {\n data: []\n });\n }\n\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/hook/deliveries\", \"GET /app/installations\", \"GET /applications/grants\", \"GET /authorizations\", \"GET /enterprises/{enterprise}/actions/permissions/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\", \"GET /enterprises/{enterprise}/actions/runners\", \"GET /enterprises/{enterprise}/actions/runners/downloads\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/runners/downloads\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/credential-authorizations\", \"GET /orgs/{org}/events\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/hooks/{hook_id}/deliveries\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/migrations\", \"GET 
/orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/packages\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/secret-scanning/alerts\", \"GET /orgs/{org}/team-sync/groups\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runners/downloads\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/autolinks\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\", \"GET /repos/{owner}/{repo}/hooks\", \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET 
/repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /scim/v2/enterprises/{enterprise}/Groups\", \"GET /scim/v2/enterprises/{enterprise}/Users\", \"GET /scim/v2/organizations/{org}/Users\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/team-sync/group-mappings\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/packages\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET /users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/packages\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * 
@param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nconst Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE 
/repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunAttemptLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listJobsForWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET 
/orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n 
setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"]\n }],\n addRepoToInstallationForAuthenticatedUser: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\"POST /content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createContentAttachmentForRepo: [\"POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\"POST /app/hook/deliveries/{delivery_id}/attempts\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"]\n }],\n removeRepoFromInstallationForAuthenticatedUser: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n 
updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n disableSelectedOrganizationGithubActionsEnterprise: [\"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n getAllowedActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n getGithubActionsPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions\"],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/organizations\"],\n setAllowedActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n setGithubActionsPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions\"],\n 
setSelectedOrganizationsEnabledGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST 
/repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\"],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\"],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\"],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\"],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET 
/user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/repositories\"],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {}, {\n renamed: [\"migrations\", \"listReposForAuthenticatedUser\"]\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT 
/orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST 
/projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\"],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\"],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"],\n updateReview: [\"PUT 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n createForRelease: [\"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"]\n }],\n acceptInvitationForAuthenticatedUser: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\"GET 
/repos/{owner}/{repo}/compare/{basehead}\"],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\"],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"]\n }],\n declineInvitationForAuthenticatedUser: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\"],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"],\n downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT 
/repos/{owner}/{repo}/automated-security-fixes\"],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\"],\n generateReleaseNotes: [\"POST /repos/{owner}/{repo}/releases/generate-notes\"],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n 
getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: 
[\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n 
createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\", {}, {\n renamed: [\"users\", \"addEmailForAuthenticatedUser\"]\n }],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\", {}, {\n renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"]\n }],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\", {}, {\n renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"]\n }],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\", {}, {\n renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"]\n }],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"]\n }],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"]\n }],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT 
/user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"]\n }],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"]\n }],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\", {}, {\n renamed: [\"users\", \"listBlockedByAuthenticatedUser\"]\n }],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\", {}, {\n renamed: [\"users\", \"listEmailsForAuthenticatedUser\"]\n }],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\", {}, {\n renamed: [\"users\", \"listFollowedByAuthenticatedUser\"]\n }],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\", {}, {\n renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"]\n }],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\", {}, {\n renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"]\n }],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\", {}, {\n renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"]\n }],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\", {}, {\n renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"]\n }],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"5.13.0\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = 
requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return _objectSpread2(_objectSpread2({}, api), {}, {\n rest: api\n });\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Bottleneck = _interopDefault(require('bottleneck/light'));\n\n// @ts-ignore\nasync function errorRequest(octokit, state, error, options) {\n if (!error.request || !error.request.request) {\n // address https://github.com/octokit/plugin-retry.js/issues/8\n throw error;\n } // retry all >= 400 && not doNotRetry\n\n\n if (error.status >= 400 && !state.doNotRetry.includes(error.status)) {\n const retries = options.request.retries != null ? 
options.request.retries : state.retries;\n const retryAfter = Math.pow((options.request.retryCount || 0) + 1, 2);\n throw octokit.retry.retryRequest(error, retries, retryAfter);\n } // Maybe eventually there will be more cases here\n\n\n throw error;\n}\n\n// @ts-ignore\n\nasync function wrapRequest(state, request, options) {\n const limiter = new Bottleneck(); // @ts-ignore\n\n limiter.on(\"failed\", function (error, info) {\n const maxRetries = ~~error.request.request.retries;\n const after = ~~error.request.request.retryAfter;\n options.request.retryCount = info.retryCount + 1;\n\n if (maxRetries > info.retryCount) {\n // Returning a number instructs the limiter to retry\n // the request after that number of milliseconds have passed\n return after * state.retryAfterBaseValue;\n }\n });\n return limiter.schedule(request, options);\n}\n\nconst VERSION = \"3.0.9\";\nfunction retry(octokit, octokitOptions) {\n const state = Object.assign({\n enabled: true,\n retryAfterBaseValue: 1000,\n doNotRetry: [400, 401, 403, 404, 422],\n retries: 3\n }, octokitOptions.retry);\n\n if (state.enabled) {\n octokit.hook.error(\"request\", errorRequest.bind(null, octokit, state));\n octokit.hook.wrap(\"request\", wrapRequest.bind(null, state));\n }\n\n return {\n retry: {\n retryRequest: (error, retries, retryAfter) => {\n error.request.request = Object.assign({}, error.request.request, {\n retries: retries,\n retryAfter: retryAfter\n });\n return error;\n }\n }\n };\n}\nretry.VERSION = VERSION;\n\nexports.VERSION = VERSION;\nexports.retry = retry;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n } // redact request credentials without mutating original request options\n\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n 
this.request = requestCopy; // deprecations\n\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n\n });\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.6.3\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : \"\"}`);\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\n\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n}\n\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data; // istanbul ignore else - just in case\n\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n\n return data.message;\n } // istanbul ignore next - just in case\n\n\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? 
[state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","/**\n * This file contains the Bottleneck library (MIT), compiled to ES2017, and without Clustering support.\n * https://github.com/SGrondin/bottleneck\n */\n(function (global, factory) {\n\ttypeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :\n\ttypeof define === 'function' && define.amd ? 
define(factory) :\n\t(global.Bottleneck = factory());\n}(this, (function () { 'use strict';\n\n\tvar commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};\n\n\tfunction getCjsExportFromNamespace (n) {\n\t\treturn n && n['default'] || n;\n\t}\n\n\tvar load = function(received, defaults, onto = {}) {\n\t var k, ref, v;\n\t for (k in defaults) {\n\t v = defaults[k];\n\t onto[k] = (ref = received[k]) != null ? ref : v;\n\t }\n\t return onto;\n\t};\n\n\tvar overwrite = function(received, defaults, onto = {}) {\n\t var k, v;\n\t for (k in received) {\n\t v = received[k];\n\t if (defaults[k] !== void 0) {\n\t onto[k] = v;\n\t }\n\t }\n\t return onto;\n\t};\n\n\tvar parser = {\n\t\tload: load,\n\t\toverwrite: overwrite\n\t};\n\n\tvar DLList;\n\n\tDLList = class DLList {\n\t constructor(incr, decr) {\n\t this.incr = incr;\n\t this.decr = decr;\n\t this._first = null;\n\t this._last = null;\n\t this.length = 0;\n\t }\n\n\t push(value) {\n\t var node;\n\t this.length++;\n\t if (typeof this.incr === \"function\") {\n\t this.incr();\n\t }\n\t node = {\n\t value,\n\t prev: this._last,\n\t next: null\n\t };\n\t if (this._last != null) {\n\t this._last.next = node;\n\t this._last = node;\n\t } else {\n\t this._first = this._last = node;\n\t }\n\t return void 0;\n\t }\n\n\t shift() {\n\t var value;\n\t if (this._first == null) {\n\t return;\n\t } else {\n\t this.length--;\n\t if (typeof this.decr === \"function\") {\n\t this.decr();\n\t }\n\t }\n\t value = this._first.value;\n\t if ((this._first = this._first.next) != null) {\n\t this._first.prev = null;\n\t } else {\n\t this._last = null;\n\t }\n\t return value;\n\t }\n\n\t first() {\n\t if (this._first != null) {\n\t return this._first.value;\n\t }\n\t }\n\n\t getArray() {\n\t var node, ref, results;\n\t node = this._first;\n\t results = [];\n\t while (node != null) {\n\t results.push((ref = node, node = node.next, ref.value));\n\t }\n\t return results;\n\t }\n\n\t forEachShift(cb) {\n\t var node;\n\t node = this.shift();\n\t while (node != null) {\n\t (cb(node), node = this.shift());\n\t }\n\t return void 0;\n\t }\n\n\t debug() {\n\t var node, ref, ref1, ref2, results;\n\t node = this._first;\n\t results = [];\n\t while (node != null) {\n\t results.push((ref = node, node = node.next, {\n\t value: ref.value,\n\t prev: (ref1 = ref.prev) != null ? ref1.value : void 0,\n\t next: (ref2 = ref.next) != null ? 
ref2.value : void 0\n\t }));\n\t }\n\t return results;\n\t }\n\n\t};\n\n\tvar DLList_1 = DLList;\n\n\tvar Events;\n\n\tEvents = class Events {\n\t constructor(instance) {\n\t this.instance = instance;\n\t this._events = {};\n\t if ((this.instance.on != null) || (this.instance.once != null) || (this.instance.removeAllListeners != null)) {\n\t throw new Error(\"An Emitter already exists for this object\");\n\t }\n\t this.instance.on = (name, cb) => {\n\t return this._addListener(name, \"many\", cb);\n\t };\n\t this.instance.once = (name, cb) => {\n\t return this._addListener(name, \"once\", cb);\n\t };\n\t this.instance.removeAllListeners = (name = null) => {\n\t if (name != null) {\n\t return delete this._events[name];\n\t } else {\n\t return this._events = {};\n\t }\n\t };\n\t }\n\n\t _addListener(name, status, cb) {\n\t var base;\n\t if ((base = this._events)[name] == null) {\n\t base[name] = [];\n\t }\n\t this._events[name].push({cb, status});\n\t return this.instance;\n\t }\n\n\t listenerCount(name) {\n\t if (this._events[name] != null) {\n\t return this._events[name].length;\n\t } else {\n\t return 0;\n\t }\n\t }\n\n\t async trigger(name, ...args) {\n\t var e, promises;\n\t try {\n\t if (name !== \"debug\") {\n\t this.trigger(\"debug\", `Event triggered: ${name}`, args);\n\t }\n\t if (this._events[name] == null) {\n\t return;\n\t }\n\t this._events[name] = this._events[name].filter(function(listener) {\n\t return listener.status !== \"none\";\n\t });\n\t promises = this._events[name].map(async(listener) => {\n\t var e, returned;\n\t if (listener.status === \"none\") {\n\t return;\n\t }\n\t if (listener.status === \"once\") {\n\t listener.status = \"none\";\n\t }\n\t try {\n\t returned = typeof listener.cb === \"function\" ? listener.cb(...args) : void 0;\n\t if (typeof (returned != null ? returned.then : void 0) === \"function\") {\n\t return (await returned);\n\t } else {\n\t return returned;\n\t }\n\t } catch (error) {\n\t e = error;\n\t {\n\t this.trigger(\"error\", e);\n\t }\n\t return null;\n\t }\n\t });\n\t return ((await Promise.all(promises))).find(function(x) {\n\t return x != null;\n\t });\n\t } catch (error) {\n\t e = error;\n\t {\n\t this.trigger(\"error\", e);\n\t }\n\t return null;\n\t }\n\t }\n\n\t};\n\n\tvar Events_1 = Events;\n\n\tvar DLList$1, Events$1, Queues;\n\n\tDLList$1 = DLList_1;\n\n\tEvents$1 = Events_1;\n\n\tQueues = class Queues {\n\t constructor(num_priorities) {\n\t var i;\n\t this.Events = new Events$1(this);\n\t this._length = 0;\n\t this._lists = (function() {\n\t var j, ref, results;\n\t results = [];\n\t for (i = j = 1, ref = num_priorities; (1 <= ref ? j <= ref : j >= ref); i = 1 <= ref ? 
++j : --j) {\n\t results.push(new DLList$1((() => {\n\t return this.incr();\n\t }), (() => {\n\t return this.decr();\n\t })));\n\t }\n\t return results;\n\t }).call(this);\n\t }\n\n\t incr() {\n\t if (this._length++ === 0) {\n\t return this.Events.trigger(\"leftzero\");\n\t }\n\t }\n\n\t decr() {\n\t if (--this._length === 0) {\n\t return this.Events.trigger(\"zero\");\n\t }\n\t }\n\n\t push(job) {\n\t return this._lists[job.options.priority].push(job);\n\t }\n\n\t queued(priority) {\n\t if (priority != null) {\n\t return this._lists[priority].length;\n\t } else {\n\t return this._length;\n\t }\n\t }\n\n\t shiftAll(fn) {\n\t return this._lists.forEach(function(list) {\n\t return list.forEachShift(fn);\n\t });\n\t }\n\n\t getFirst(arr = this._lists) {\n\t var j, len, list;\n\t for (j = 0, len = arr.length; j < len; j++) {\n\t list = arr[j];\n\t if (list.length > 0) {\n\t return list;\n\t }\n\t }\n\t return [];\n\t }\n\n\t shiftLastFrom(priority) {\n\t return this.getFirst(this._lists.slice(priority).reverse()).shift();\n\t }\n\n\t};\n\n\tvar Queues_1 = Queues;\n\n\tvar BottleneckError;\n\n\tBottleneckError = class BottleneckError extends Error {};\n\n\tvar BottleneckError_1 = BottleneckError;\n\n\tvar BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1;\n\n\tNUM_PRIORITIES = 10;\n\n\tDEFAULT_PRIORITY = 5;\n\n\tparser$1 = parser;\n\n\tBottleneckError$1 = BottleneckError_1;\n\n\tJob = class Job {\n\t constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) {\n\t this.task = task;\n\t this.args = args;\n\t this.rejectOnDrop = rejectOnDrop;\n\t this.Events = Events;\n\t this._states = _states;\n\t this.Promise = Promise;\n\t this.options = parser$1.load(options, jobDefaults);\n\t this.options.priority = this._sanitizePriority(this.options.priority);\n\t if (this.options.id === jobDefaults.id) {\n\t this.options.id = `${this.options.id}-${this._randomIndex()}`;\n\t }\n\t this.promise = new this.Promise((_resolve, _reject) => {\n\t this._resolve = _resolve;\n\t this._reject = _reject;\n\t });\n\t this.retryCount = 0;\n\t }\n\n\t _sanitizePriority(priority) {\n\t var sProperty;\n\t sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority;\n\t if (sProperty < 0) {\n\t return 0;\n\t } else if (sProperty > NUM_PRIORITIES - 1) {\n\t return NUM_PRIORITIES - 1;\n\t } else {\n\t return sProperty;\n\t }\n\t }\n\n\t _randomIndex() {\n\t return Math.random().toString(36).slice(2);\n\t }\n\n\t doDrop({error, message = \"This job has been dropped by Bottleneck\"} = {}) {\n\t if (this._states.remove(this.options.id)) {\n\t if (this.rejectOnDrop) {\n\t this._reject(error != null ? error : new BottleneckError$1(message));\n\t }\n\t this.Events.trigger(\"dropped\", {args: this.args, options: this.options, task: this.task, promise: this.promise});\n\t return true;\n\t } else {\n\t return false;\n\t }\n\t }\n\n\t _assertStatus(expected) {\n\t var status;\n\t status = this._states.jobStatus(this.options.id);\n\t if (!(status === expected || (expected === \"DONE\" && status === null))) {\n\t throw new BottleneckError$1(`Invalid job status ${status}, expected ${expected}. 
Please open an issue at https://github.com/SGrondin/bottleneck/issues`);\n\t }\n\t }\n\n\t doReceive() {\n\t this._states.start(this.options.id);\n\t return this.Events.trigger(\"received\", {args: this.args, options: this.options});\n\t }\n\n\t doQueue(reachedHWM, blocked) {\n\t this._assertStatus(\"RECEIVED\");\n\t this._states.next(this.options.id);\n\t return this.Events.trigger(\"queued\", {args: this.args, options: this.options, reachedHWM, blocked});\n\t }\n\n\t doRun() {\n\t if (this.retryCount === 0) {\n\t this._assertStatus(\"QUEUED\");\n\t this._states.next(this.options.id);\n\t } else {\n\t this._assertStatus(\"EXECUTING\");\n\t }\n\t return this.Events.trigger(\"scheduled\", {args: this.args, options: this.options});\n\t }\n\n\t async doExecute(chained, clearGlobalState, run, free) {\n\t var error, eventInfo, passed;\n\t if (this.retryCount === 0) {\n\t this._assertStatus(\"RUNNING\");\n\t this._states.next(this.options.id);\n\t } else {\n\t this._assertStatus(\"EXECUTING\");\n\t }\n\t eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount};\n\t this.Events.trigger(\"executing\", eventInfo);\n\t try {\n\t passed = (await (chained != null ? chained.schedule(this.options, this.task, ...this.args) : this.task(...this.args)));\n\t if (clearGlobalState()) {\n\t this.doDone(eventInfo);\n\t await free(this.options, eventInfo);\n\t this._assertStatus(\"DONE\");\n\t return this._resolve(passed);\n\t }\n\t } catch (error1) {\n\t error = error1;\n\t return this._onFailure(error, eventInfo, clearGlobalState, run, free);\n\t }\n\t }\n\n\t doExpire(clearGlobalState, run, free) {\n\t var error, eventInfo;\n\t if (this._states.jobStatus(this.options.id === \"RUNNING\")) {\n\t this._states.next(this.options.id);\n\t }\n\t this._assertStatus(\"EXECUTING\");\n\t eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount};\n\t error = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`);\n\t return this._onFailure(error, eventInfo, clearGlobalState, run, free);\n\t }\n\n\t async _onFailure(error, eventInfo, clearGlobalState, run, free) {\n\t var retry, retryAfter;\n\t if (clearGlobalState()) {\n\t retry = (await this.Events.trigger(\"failed\", error, eventInfo));\n\t if (retry != null) {\n\t retryAfter = ~~retry;\n\t this.Events.trigger(\"retry\", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo);\n\t this.retryCount++;\n\t return run(retryAfter);\n\t } else {\n\t this.doDone(eventInfo);\n\t await free(this.options, eventInfo);\n\t this._assertStatus(\"DONE\");\n\t return this._reject(error);\n\t }\n\t }\n\t }\n\n\t doDone(eventInfo) {\n\t this._assertStatus(\"EXECUTING\");\n\t this._states.next(this.options.id);\n\t return this.Events.trigger(\"done\", eventInfo);\n\t }\n\n\t};\n\n\tvar Job_1 = Job;\n\n\tvar BottleneckError$2, LocalDatastore, parser$2;\n\n\tparser$2 = parser;\n\n\tBottleneckError$2 = BottleneckError_1;\n\n\tLocalDatastore = class LocalDatastore {\n\t constructor(instance, storeOptions, storeInstanceOptions) {\n\t this.instance = instance;\n\t this.storeOptions = storeOptions;\n\t this.clientId = this.instance._randomIndex();\n\t parser$2.load(storeInstanceOptions, storeInstanceOptions, this);\n\t this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now();\n\t this._running = 0;\n\t this._done = 0;\n\t this._unblockTime = 0;\n\t this.ready = this.Promise.resolve();\n\t this.clients = {};\n\t this._startHeartbeat();\n\t }\n\n\t _startHeartbeat() {\n\t var 
base;\n\t if ((this.heartbeat == null) && (((this.storeOptions.reservoirRefreshInterval != null) && (this.storeOptions.reservoirRefreshAmount != null)) || ((this.storeOptions.reservoirIncreaseInterval != null) && (this.storeOptions.reservoirIncreaseAmount != null)))) {\n\t return typeof (base = (this.heartbeat = setInterval(() => {\n\t var amount, incr, maximum, now, reservoir;\n\t now = Date.now();\n\t if ((this.storeOptions.reservoirRefreshInterval != null) && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) {\n\t this._lastReservoirRefresh = now;\n\t this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount;\n\t this.instance._drainAll(this.computeCapacity());\n\t }\n\t if ((this.storeOptions.reservoirIncreaseInterval != null) && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) {\n\t ({\n\t reservoirIncreaseAmount: amount,\n\t reservoirIncreaseMaximum: maximum,\n\t reservoir\n\t } = this.storeOptions);\n\t this._lastReservoirIncrease = now;\n\t incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount;\n\t if (incr > 0) {\n\t this.storeOptions.reservoir += incr;\n\t return this.instance._drainAll(this.computeCapacity());\n\t }\n\t }\n\t }, this.heartbeatInterval))).unref === \"function\" ? base.unref() : void 0;\n\t } else {\n\t return clearInterval(this.heartbeat);\n\t }\n\t }\n\n\t async __publish__(message) {\n\t await this.yieldLoop();\n\t return this.instance.Events.trigger(\"message\", message.toString());\n\t }\n\n\t async __disconnect__(flush) {\n\t await this.yieldLoop();\n\t clearInterval(this.heartbeat);\n\t return this.Promise.resolve();\n\t }\n\n\t yieldLoop(t = 0) {\n\t return new this.Promise(function(resolve, reject) {\n\t return setTimeout(resolve, t);\n\t });\n\t }\n\n\t computePenalty() {\n\t var ref;\n\t return (ref = this.storeOptions.penalty) != null ? 
ref : (15 * this.storeOptions.minTime) || 5000;\n\t }\n\n\t async __updateSettings__(options) {\n\t await this.yieldLoop();\n\t parser$2.overwrite(options, options, this.storeOptions);\n\t this._startHeartbeat();\n\t this.instance._drainAll(this.computeCapacity());\n\t return true;\n\t }\n\n\t async __running__() {\n\t await this.yieldLoop();\n\t return this._running;\n\t }\n\n\t async __queued__() {\n\t await this.yieldLoop();\n\t return this.instance.queued();\n\t }\n\n\t async __done__() {\n\t await this.yieldLoop();\n\t return this._done;\n\t }\n\n\t async __groupCheck__(time) {\n\t await this.yieldLoop();\n\t return (this._nextRequest + this.timeout) < time;\n\t }\n\n\t computeCapacity() {\n\t var maxConcurrent, reservoir;\n\t ({maxConcurrent, reservoir} = this.storeOptions);\n\t if ((maxConcurrent != null) && (reservoir != null)) {\n\t return Math.min(maxConcurrent - this._running, reservoir);\n\t } else if (maxConcurrent != null) {\n\t return maxConcurrent - this._running;\n\t } else if (reservoir != null) {\n\t return reservoir;\n\t } else {\n\t return null;\n\t }\n\t }\n\n\t conditionsCheck(weight) {\n\t var capacity;\n\t capacity = this.computeCapacity();\n\t return (capacity == null) || weight <= capacity;\n\t }\n\n\t async __incrementReservoir__(incr) {\n\t var reservoir;\n\t await this.yieldLoop();\n\t reservoir = this.storeOptions.reservoir += incr;\n\t this.instance._drainAll(this.computeCapacity());\n\t return reservoir;\n\t }\n\n\t async __currentReservoir__() {\n\t await this.yieldLoop();\n\t return this.storeOptions.reservoir;\n\t }\n\n\t isBlocked(now) {\n\t return this._unblockTime >= now;\n\t }\n\n\t check(weight, now) {\n\t return this.conditionsCheck(weight) && (this._nextRequest - now) <= 0;\n\t }\n\n\t async __check__(weight) {\n\t var now;\n\t await this.yieldLoop();\n\t now = Date.now();\n\t return this.check(weight, now);\n\t }\n\n\t async __register__(index, weight, expiration) {\n\t var now, wait;\n\t await this.yieldLoop();\n\t now = Date.now();\n\t if (this.conditionsCheck(weight)) {\n\t this._running += weight;\n\t if (this.storeOptions.reservoir != null) {\n\t this.storeOptions.reservoir -= weight;\n\t }\n\t wait = Math.max(this._nextRequest - now, 0);\n\t this._nextRequest = now + wait + this.storeOptions.minTime;\n\t return {\n\t success: true,\n\t wait,\n\t reservoir: this.storeOptions.reservoir\n\t };\n\t } else {\n\t return {\n\t success: false\n\t };\n\t }\n\t }\n\n\t strategyIsBlock() {\n\t return this.storeOptions.strategy === 3;\n\t }\n\n\t async __submit__(queueLength, weight) {\n\t var blocked, now, reachedHWM;\n\t await this.yieldLoop();\n\t if ((this.storeOptions.maxConcurrent != null) && weight > this.storeOptions.maxConcurrent) {\n\t throw new BottleneckError$2(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`);\n\t }\n\t now = Date.now();\n\t reachedHWM = (this.storeOptions.highWater != null) && queueLength === this.storeOptions.highWater && !this.check(weight, now);\n\t blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now));\n\t if (blocked) {\n\t this._unblockTime = now + this.computePenalty();\n\t this._nextRequest = this._unblockTime + this.storeOptions.minTime;\n\t this.instance._dropAllQueued();\n\t }\n\t return {\n\t reachedHWM,\n\t blocked,\n\t strategy: this.storeOptions.strategy\n\t };\n\t }\n\n\t async __free__(index, weight) {\n\t await this.yieldLoop();\n\t this._running -= weight;\n\t this._done += weight;\n\t 
this.instance._drainAll(this.computeCapacity());\n\t return {\n\t running: this._running\n\t };\n\t }\n\n\t};\n\n\tvar LocalDatastore_1 = LocalDatastore;\n\n\tvar BottleneckError$3, States;\n\n\tBottleneckError$3 = BottleneckError_1;\n\n\tStates = class States {\n\t constructor(status1) {\n\t this.status = status1;\n\t this._jobs = {};\n\t this.counts = this.status.map(function() {\n\t return 0;\n\t });\n\t }\n\n\t next(id) {\n\t var current, next;\n\t current = this._jobs[id];\n\t next = current + 1;\n\t if ((current != null) && next < this.status.length) {\n\t this.counts[current]--;\n\t this.counts[next]++;\n\t return this._jobs[id]++;\n\t } else if (current != null) {\n\t this.counts[current]--;\n\t return delete this._jobs[id];\n\t }\n\t }\n\n\t start(id) {\n\t var initial;\n\t initial = 0;\n\t this._jobs[id] = initial;\n\t return this.counts[initial]++;\n\t }\n\n\t remove(id) {\n\t var current;\n\t current = this._jobs[id];\n\t if (current != null) {\n\t this.counts[current]--;\n\t delete this._jobs[id];\n\t }\n\t return current != null;\n\t }\n\n\t jobStatus(id) {\n\t var ref;\n\t return (ref = this.status[this._jobs[id]]) != null ? ref : null;\n\t }\n\n\t statusJobs(status) {\n\t var k, pos, ref, results, v;\n\t if (status != null) {\n\t pos = this.status.indexOf(status);\n\t if (pos < 0) {\n\t throw new BottleneckError$3(`status must be one of ${this.status.join(', ')}`);\n\t }\n\t ref = this._jobs;\n\t results = [];\n\t for (k in ref) {\n\t v = ref[k];\n\t if (v === pos) {\n\t results.push(k);\n\t }\n\t }\n\t return results;\n\t } else {\n\t return Object.keys(this._jobs);\n\t }\n\t }\n\n\t statusCounts() {\n\t return this.counts.reduce(((acc, v, i) => {\n\t acc[this.status[i]] = v;\n\t return acc;\n\t }), {});\n\t }\n\n\t};\n\n\tvar States_1 = States;\n\n\tvar DLList$2, Sync;\n\n\tDLList$2 = DLList_1;\n\n\tSync = class Sync {\n\t constructor(name, Promise) {\n\t this.schedule = this.schedule.bind(this);\n\t this.name = name;\n\t this.Promise = Promise;\n\t this._running = 0;\n\t this._queue = new DLList$2();\n\t }\n\n\t isEmpty() {\n\t return this._queue.length === 0;\n\t }\n\n\t async _tryToRun() {\n\t var args, cb, error, reject, resolve, returned, task;\n\t if ((this._running < 1) && this._queue.length > 0) {\n\t this._running++;\n\t ({task, args, resolve, reject} = this._queue.shift());\n\t cb = (await (async function() {\n\t try {\n\t returned = (await task(...args));\n\t return function() {\n\t return resolve(returned);\n\t };\n\t } catch (error1) {\n\t error = error1;\n\t return function() {\n\t return reject(error);\n\t };\n\t }\n\t })());\n\t this._running--;\n\t this._tryToRun();\n\t return cb();\n\t }\n\t }\n\n\t schedule(task, ...args) {\n\t var promise, reject, resolve;\n\t resolve = reject = null;\n\t promise = new this.Promise(function(_resolve, _reject) {\n\t resolve = _resolve;\n\t return reject = _reject;\n\t });\n\t this._queue.push({task, args, resolve, reject});\n\t this._tryToRun();\n\t return promise;\n\t }\n\n\t};\n\n\tvar Sync_1 = Sync;\n\n\tvar version = \"2.19.5\";\n\tvar version$1 = {\n\t\tversion: version\n\t};\n\n\tvar version$2 = /*#__PURE__*/Object.freeze({\n\t\tversion: version,\n\t\tdefault: version$1\n\t});\n\n\tvar require$$2 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');\n\n\tvar require$$3 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');\n\n\tvar require$$4 = () => console.log('You must import the full version of Bottleneck in order to 
use this feature.');\n\n\tvar Events$2, Group, IORedisConnection$1, RedisConnection$1, Scripts$1, parser$3;\n\n\tparser$3 = parser;\n\n\tEvents$2 = Events_1;\n\n\tRedisConnection$1 = require$$2;\n\n\tIORedisConnection$1 = require$$3;\n\n\tScripts$1 = require$$4;\n\n\tGroup = (function() {\n\t class Group {\n\t constructor(limiterOptions = {}) {\n\t this.deleteKey = this.deleteKey.bind(this);\n\t this.limiterOptions = limiterOptions;\n\t parser$3.load(this.limiterOptions, this.defaults, this);\n\t this.Events = new Events$2(this);\n\t this.instances = {};\n\t this.Bottleneck = Bottleneck_1;\n\t this._startAutoCleanup();\n\t this.sharedConnection = this.connection != null;\n\t if (this.connection == null) {\n\t if (this.limiterOptions.datastore === \"redis\") {\n\t this.connection = new RedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events}));\n\t } else if (this.limiterOptions.datastore === \"ioredis\") {\n\t this.connection = new IORedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events}));\n\t }\n\t }\n\t }\n\n\t key(key = \"\") {\n\t var ref;\n\t return (ref = this.instances[key]) != null ? ref : (() => {\n\t var limiter;\n\t limiter = this.instances[key] = new this.Bottleneck(Object.assign(this.limiterOptions, {\n\t id: `${this.id}-${key}`,\n\t timeout: this.timeout,\n\t connection: this.connection\n\t }));\n\t this.Events.trigger(\"created\", limiter, key);\n\t return limiter;\n\t })();\n\t }\n\n\t async deleteKey(key = \"\") {\n\t var deleted, instance;\n\t instance = this.instances[key];\n\t if (this.connection) {\n\t deleted = (await this.connection.__runCommand__(['del', ...Scripts$1.allKeys(`${this.id}-${key}`)]));\n\t }\n\t if (instance != null) {\n\t delete this.instances[key];\n\t await instance.disconnect();\n\t }\n\t return (instance != null) || deleted > 0;\n\t }\n\n\t limiters() {\n\t var k, ref, results, v;\n\t ref = this.instances;\n\t results = [];\n\t for (k in ref) {\n\t v = ref[k];\n\t results.push({\n\t key: k,\n\t limiter: v\n\t });\n\t }\n\t return results;\n\t }\n\n\t keys() {\n\t return Object.keys(this.instances);\n\t }\n\n\t async clusterKeys() {\n\t var cursor, end, found, i, k, keys, len, next, start;\n\t if (this.connection == null) {\n\t return this.Promise.resolve(this.keys());\n\t }\n\t keys = [];\n\t cursor = null;\n\t start = `b_${this.id}-`.length;\n\t end = \"_settings\".length;\n\t while (cursor !== 0) {\n\t [next, found] = (await this.connection.__runCommand__([\"scan\", cursor != null ? cursor : 0, \"match\", `b_${this.id}-*_settings`, \"count\", 10000]));\n\t cursor = ~~next;\n\t for (i = 0, len = found.length; i < len; i++) {\n\t k = found[i];\n\t keys.push(k.slice(start, -end));\n\t }\n\t }\n\t return keys;\n\t }\n\n\t _startAutoCleanup() {\n\t var base;\n\t clearInterval(this.interval);\n\t return typeof (base = (this.interval = setInterval(async() => {\n\t var e, k, ref, results, time, v;\n\t time = Date.now();\n\t ref = this.instances;\n\t results = [];\n\t for (k in ref) {\n\t v = ref[k];\n\t try {\n\t if ((await v._store.__groupCheck__(time))) {\n\t results.push(this.deleteKey(k));\n\t } else {\n\t results.push(void 0);\n\t }\n\t } catch (error) {\n\t e = error;\n\t results.push(v.Events.trigger(\"error\", e));\n\t }\n\t }\n\t return results;\n\t }, this.timeout / 2))).unref === \"function\" ? 
base.unref() : void 0;\n\t }\n\n\t updateSettings(options = {}) {\n\t parser$3.overwrite(options, this.defaults, this);\n\t parser$3.overwrite(options, options, this.limiterOptions);\n\t if (options.timeout != null) {\n\t return this._startAutoCleanup();\n\t }\n\t }\n\n\t disconnect(flush = true) {\n\t var ref;\n\t if (!this.sharedConnection) {\n\t return (ref = this.connection) != null ? ref.disconnect(flush) : void 0;\n\t }\n\t }\n\n\t }\n\t Group.prototype.defaults = {\n\t timeout: 1000 * 60 * 5,\n\t connection: null,\n\t Promise: Promise,\n\t id: \"group-key\"\n\t };\n\n\t return Group;\n\n\t}).call(commonjsGlobal);\n\n\tvar Group_1 = Group;\n\n\tvar Batcher, Events$3, parser$4;\n\n\tparser$4 = parser;\n\n\tEvents$3 = Events_1;\n\n\tBatcher = (function() {\n\t class Batcher {\n\t constructor(options = {}) {\n\t this.options = options;\n\t parser$4.load(this.options, this.defaults, this);\n\t this.Events = new Events$3(this);\n\t this._arr = [];\n\t this._resetPromise();\n\t this._lastFlush = Date.now();\n\t }\n\n\t _resetPromise() {\n\t return this._promise = new this.Promise((res, rej) => {\n\t return this._resolve = res;\n\t });\n\t }\n\n\t _flush() {\n\t clearTimeout(this._timeout);\n\t this._lastFlush = Date.now();\n\t this._resolve();\n\t this.Events.trigger(\"batch\", this._arr);\n\t this._arr = [];\n\t return this._resetPromise();\n\t }\n\n\t add(data) {\n\t var ret;\n\t this._arr.push(data);\n\t ret = this._promise;\n\t if (this._arr.length === this.maxSize) {\n\t this._flush();\n\t } else if ((this.maxTime != null) && this._arr.length === 1) {\n\t this._timeout = setTimeout(() => {\n\t return this._flush();\n\t }, this.maxTime);\n\t }\n\t return ret;\n\t }\n\n\t }\n\t Batcher.prototype.defaults = {\n\t maxTime: null,\n\t maxSize: null,\n\t Promise: Promise\n\t };\n\n\t return Batcher;\n\n\t}).call(commonjsGlobal);\n\n\tvar Batcher_1 = Batcher;\n\n\tvar require$$4$1 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');\n\n\tvar require$$8 = getCjsExportFromNamespace(version$2);\n\n\tvar Bottleneck, DEFAULT_PRIORITY$1, Events$4, Job$1, LocalDatastore$1, NUM_PRIORITIES$1, Queues$1, RedisDatastore$1, States$1, Sync$1, parser$5,\n\t splice = [].splice;\n\n\tNUM_PRIORITIES$1 = 10;\n\n\tDEFAULT_PRIORITY$1 = 5;\n\n\tparser$5 = parser;\n\n\tQueues$1 = Queues_1;\n\n\tJob$1 = Job_1;\n\n\tLocalDatastore$1 = LocalDatastore_1;\n\n\tRedisDatastore$1 = require$$4$1;\n\n\tEvents$4 = Events_1;\n\n\tStates$1 = States_1;\n\n\tSync$1 = Sync_1;\n\n\tBottleneck = (function() {\n\t class Bottleneck {\n\t constructor(options = {}, ...invalid) {\n\t var storeInstanceOptions, storeOptions;\n\t this._addToQueue = this._addToQueue.bind(this);\n\t this._validateOptions(options, invalid);\n\t parser$5.load(options, this.instanceDefaults, this);\n\t this._queues = new Queues$1(NUM_PRIORITIES$1);\n\t this._scheduled = {};\n\t this._states = new States$1([\"RECEIVED\", \"QUEUED\", \"RUNNING\", \"EXECUTING\"].concat(this.trackDoneStatus ? 
[\"DONE\"] : []));\n\t this._limiter = null;\n\t this.Events = new Events$4(this);\n\t this._submitLock = new Sync$1(\"submit\", this.Promise);\n\t this._registerLock = new Sync$1(\"register\", this.Promise);\n\t storeOptions = parser$5.load(options, this.storeDefaults, {});\n\t this._store = (function() {\n\t if (this.datastore === \"redis\" || this.datastore === \"ioredis\" || (this.connection != null)) {\n\t storeInstanceOptions = parser$5.load(options, this.redisStoreDefaults, {});\n\t return new RedisDatastore$1(this, storeOptions, storeInstanceOptions);\n\t } else if (this.datastore === \"local\") {\n\t storeInstanceOptions = parser$5.load(options, this.localStoreDefaults, {});\n\t return new LocalDatastore$1(this, storeOptions, storeInstanceOptions);\n\t } else {\n\t throw new Bottleneck.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`);\n\t }\n\t }).call(this);\n\t this._queues.on(\"leftzero\", () => {\n\t var ref;\n\t return (ref = this._store.heartbeat) != null ? typeof ref.ref === \"function\" ? ref.ref() : void 0 : void 0;\n\t });\n\t this._queues.on(\"zero\", () => {\n\t var ref;\n\t return (ref = this._store.heartbeat) != null ? typeof ref.unref === \"function\" ? ref.unref() : void 0 : void 0;\n\t });\n\t }\n\n\t _validateOptions(options, invalid) {\n\t if (!((options != null) && typeof options === \"object\" && invalid.length === 0)) {\n\t throw new Bottleneck.prototype.BottleneckError(\"Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1.\");\n\t }\n\t }\n\n\t ready() {\n\t return this._store.ready;\n\t }\n\n\t clients() {\n\t return this._store.clients;\n\t }\n\n\t channel() {\n\t return `b_${this.id}`;\n\t }\n\n\t channel_client() {\n\t return `b_${this.id}_${this._store.clientId}`;\n\t }\n\n\t publish(message) {\n\t return this._store.__publish__(message);\n\t }\n\n\t disconnect(flush = true) {\n\t return this._store.__disconnect__(flush);\n\t }\n\n\t chain(_limiter) {\n\t this._limiter = _limiter;\n\t return this;\n\t }\n\n\t queued(priority) {\n\t return this._queues.queued(priority);\n\t }\n\n\t clusterQueued() {\n\t return this._store.__queued__();\n\t }\n\n\t empty() {\n\t return this.queued() === 0 && this._submitLock.isEmpty();\n\t }\n\n\t running() {\n\t return this._store.__running__();\n\t }\n\n\t done() {\n\t return this._store.__done__();\n\t }\n\n\t jobStatus(id) {\n\t return this._states.jobStatus(id);\n\t }\n\n\t jobs(status) {\n\t return this._states.statusJobs(status);\n\t }\n\n\t counts() {\n\t return this._states.statusCounts();\n\t }\n\n\t _randomIndex() {\n\t return Math.random().toString(36).slice(2);\n\t }\n\n\t check(weight = 1) {\n\t return this._store.__check__(weight);\n\t }\n\n\t _clearGlobalState(index) {\n\t if (this._scheduled[index] != null) {\n\t clearTimeout(this._scheduled[index].expiration);\n\t delete this._scheduled[index];\n\t return true;\n\t } else {\n\t return false;\n\t }\n\t }\n\n\t async _free(index, job, options, eventInfo) {\n\t var e, running;\n\t try {\n\t ({running} = (await this._store.__free__(index, options.weight)));\n\t this.Events.trigger(\"debug\", `Freed ${options.id}`, eventInfo);\n\t if (running === 0 && this.empty()) {\n\t return this.Events.trigger(\"idle\");\n\t }\n\t } catch (error1) {\n\t e = error1;\n\t return this.Events.trigger(\"error\", e);\n\t }\n\t }\n\n\t _run(index, job, wait) {\n\t var clearGlobalState, free, run;\n\t job.doRun();\n\t clearGlobalState = 
this._clearGlobalState.bind(this, index);\n\t run = this._run.bind(this, index, job);\n\t free = this._free.bind(this, index, job);\n\t return this._scheduled[index] = {\n\t timeout: setTimeout(() => {\n\t return job.doExecute(this._limiter, clearGlobalState, run, free);\n\t }, wait),\n\t expiration: job.options.expiration != null ? setTimeout(function() {\n\t return job.doExpire(clearGlobalState, run, free);\n\t }, wait + job.options.expiration) : void 0,\n\t job: job\n\t };\n\t }\n\n\t _drainOne(capacity) {\n\t return this._registerLock.schedule(() => {\n\t var args, index, next, options, queue;\n\t if (this.queued() === 0) {\n\t return this.Promise.resolve(null);\n\t }\n\t queue = this._queues.getFirst();\n\t ({options, args} = next = queue.first());\n\t if ((capacity != null) && options.weight > capacity) {\n\t return this.Promise.resolve(null);\n\t }\n\t this.Events.trigger(\"debug\", `Draining ${options.id}`, {args, options});\n\t index = this._randomIndex();\n\t return this._store.__register__(index, options.weight, options.expiration).then(({success, wait, reservoir}) => {\n\t var empty;\n\t this.Events.trigger(\"debug\", `Drained ${options.id}`, {success, args, options});\n\t if (success) {\n\t queue.shift();\n\t empty = this.empty();\n\t if (empty) {\n\t this.Events.trigger(\"empty\");\n\t }\n\t if (reservoir === 0) {\n\t this.Events.trigger(\"depleted\", empty);\n\t }\n\t this._run(index, next, wait);\n\t return this.Promise.resolve(options.weight);\n\t } else {\n\t return this.Promise.resolve(null);\n\t }\n\t });\n\t });\n\t }\n\n\t _drainAll(capacity, total = 0) {\n\t return this._drainOne(capacity).then((drained) => {\n\t var newCapacity;\n\t if (drained != null) {\n\t newCapacity = capacity != null ? capacity - drained : capacity;\n\t return this._drainAll(newCapacity, total + drained);\n\t } else {\n\t return this.Promise.resolve(total);\n\t }\n\t }).catch((e) => {\n\t return this.Events.trigger(\"error\", e);\n\t });\n\t }\n\n\t _dropAllQueued(message) {\n\t return this._queues.shiftAll(function(job) {\n\t return job.doDrop({message});\n\t });\n\t }\n\n\t stop(options = {}) {\n\t var done, waitForExecuting;\n\t options = parser$5.load(options, this.stopDefaults);\n\t waitForExecuting = (at) => {\n\t var finished;\n\t finished = () => {\n\t var counts;\n\t counts = this._states.counts;\n\t return (counts[0] + counts[1] + counts[2] + counts[3]) === at;\n\t };\n\t return new this.Promise((resolve, reject) => {\n\t if (finished()) {\n\t return resolve();\n\t } else {\n\t return this.on(\"done\", () => {\n\t if (finished()) {\n\t this.removeAllListeners(\"done\");\n\t return resolve();\n\t }\n\t });\n\t }\n\t });\n\t };\n\t done = options.dropWaitingJobs ? 
(this._run = function(index, next) {\n\t return next.doDrop({\n\t message: options.dropErrorMessage\n\t });\n\t }, this._drainOne = () => {\n\t return this.Promise.resolve(null);\n\t }, this._registerLock.schedule(() => {\n\t return this._submitLock.schedule(() => {\n\t var k, ref, v;\n\t ref = this._scheduled;\n\t for (k in ref) {\n\t v = ref[k];\n\t if (this.jobStatus(v.job.options.id) === \"RUNNING\") {\n\t clearTimeout(v.timeout);\n\t clearTimeout(v.expiration);\n\t v.job.doDrop({\n\t message: options.dropErrorMessage\n\t });\n\t }\n\t }\n\t this._dropAllQueued(options.dropErrorMessage);\n\t return waitForExecuting(0);\n\t });\n\t })) : this.schedule({\n\t priority: NUM_PRIORITIES$1 - 1,\n\t weight: 0\n\t }, () => {\n\t return waitForExecuting(1);\n\t });\n\t this._receive = function(job) {\n\t return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage));\n\t };\n\t this.stop = () => {\n\t return this.Promise.reject(new Bottleneck.prototype.BottleneckError(\"stop() has already been called\"));\n\t };\n\t return done;\n\t }\n\n\t async _addToQueue(job) {\n\t var args, blocked, error, options, reachedHWM, shifted, strategy;\n\t ({args, options} = job);\n\t try {\n\t ({reachedHWM, blocked, strategy} = (await this._store.__submit__(this.queued(), options.weight)));\n\t } catch (error1) {\n\t error = error1;\n\t this.Events.trigger(\"debug\", `Could not queue ${options.id}`, {args, options, error});\n\t job.doDrop({error});\n\t return false;\n\t }\n\t if (blocked) {\n\t job.doDrop();\n\t return true;\n\t } else if (reachedHWM) {\n\t shifted = strategy === Bottleneck.prototype.strategy.LEAK ? this._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? this._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? job : void 0;\n\t if (shifted != null) {\n\t shifted.doDrop();\n\t }\n\t if ((shifted == null) || strategy === Bottleneck.prototype.strategy.OVERFLOW) {\n\t if (shifted == null) {\n\t job.doDrop();\n\t }\n\t return reachedHWM;\n\t }\n\t }\n\t job.doQueue(reachedHWM, blocked);\n\t this._queues.push(job);\n\t await this._drainAll();\n\t return reachedHWM;\n\t }\n\n\t _receive(job) {\n\t if (this._states.jobStatus(job.options.id) != null) {\n\t job._reject(new Bottleneck.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`));\n\t return false;\n\t } else {\n\t job.doReceive();\n\t return this._submitLock.schedule(this._addToQueue, job);\n\t }\n\t }\n\n\t submit(...args) {\n\t var cb, fn, job, options, ref, ref1, task;\n\t if (typeof args[0] === \"function\") {\n\t ref = args, [fn, ...args] = ref, [cb] = splice.call(args, -1);\n\t options = parser$5.load({}, this.jobDefaults);\n\t } else {\n\t ref1 = args, [options, fn, ...args] = ref1, [cb] = splice.call(args, -1);\n\t options = parser$5.load(options, this.jobDefaults);\n\t }\n\t task = (...args) => {\n\t return new this.Promise(function(resolve, reject) {\n\t return fn(...args, function(...args) {\n\t return (args[0] != null ? reject : resolve)(args);\n\t });\n\t });\n\t };\n\t job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);\n\t job.promise.then(function(args) {\n\t return typeof cb === \"function\" ? cb(...args) : void 0;\n\t }).catch(function(args) {\n\t if (Array.isArray(args)) {\n\t return typeof cb === \"function\" ? cb(...args) : void 0;\n\t } else {\n\t return typeof cb === \"function\" ? 
cb(args) : void 0;\n\t }\n\t });\n\t return this._receive(job);\n\t }\n\n\t schedule(...args) {\n\t var job, options, task;\n\t if (typeof args[0] === \"function\") {\n\t [task, ...args] = args;\n\t options = {};\n\t } else {\n\t [options, task, ...args] = args;\n\t }\n\t job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);\n\t this._receive(job);\n\t return job.promise;\n\t }\n\n\t wrap(fn) {\n\t var schedule, wrapped;\n\t schedule = this.schedule.bind(this);\n\t wrapped = function(...args) {\n\t return schedule(fn.bind(this), ...args);\n\t };\n\t wrapped.withOptions = function(options, ...args) {\n\t return schedule(options, fn, ...args);\n\t };\n\t return wrapped;\n\t }\n\n\t async updateSettings(options = {}) {\n\t await this._store.__updateSettings__(parser$5.overwrite(options, this.storeDefaults));\n\t parser$5.overwrite(options, this.instanceDefaults, this);\n\t return this;\n\t }\n\n\t currentReservoir() {\n\t return this._store.__currentReservoir__();\n\t }\n\n\t incrementReservoir(incr = 0) {\n\t return this._store.__incrementReservoir__(incr);\n\t }\n\n\t }\n\t Bottleneck.default = Bottleneck;\n\n\t Bottleneck.Events = Events$4;\n\n\t Bottleneck.version = Bottleneck.prototype.version = require$$8.version;\n\n\t Bottleneck.strategy = Bottleneck.prototype.strategy = {\n\t LEAK: 1,\n\t OVERFLOW: 2,\n\t OVERFLOW_PRIORITY: 4,\n\t BLOCK: 3\n\t };\n\n\t Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = BottleneckError_1;\n\n\t Bottleneck.Group = Bottleneck.prototype.Group = Group_1;\n\n\t Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = require$$2;\n\n\t Bottleneck.IORedisConnection = Bottleneck.prototype.IORedisConnection = require$$3;\n\n\t Bottleneck.Batcher = Bottleneck.prototype.Batcher = Batcher_1;\n\n\t Bottleneck.prototype.jobDefaults = {\n\t priority: DEFAULT_PRIORITY$1,\n\t weight: 1,\n\t expiration: null,\n\t id: \"\"\n\t };\n\n\t Bottleneck.prototype.storeDefaults = {\n\t maxConcurrent: null,\n\t minTime: 0,\n\t highWater: null,\n\t strategy: Bottleneck.prototype.strategy.LEAK,\n\t penalty: null,\n\t reservoir: null,\n\t reservoirRefreshInterval: null,\n\t reservoirRefreshAmount: null,\n\t reservoirIncreaseInterval: null,\n\t reservoirIncreaseAmount: null,\n\t reservoirIncreaseMaximum: null\n\t };\n\n\t Bottleneck.prototype.localStoreDefaults = {\n\t Promise: Promise,\n\t timeout: null,\n\t heartbeatInterval: 250\n\t };\n\n\t Bottleneck.prototype.redisStoreDefaults = {\n\t Promise: Promise,\n\t timeout: null,\n\t heartbeatInterval: 5000,\n\t clientTimeout: 10000,\n\t Redis: null,\n\t clientOptions: {},\n\t clusterNodes: null,\n\t clearDatastore: false,\n\t connection: null\n\t };\n\n\t Bottleneck.prototype.instanceDefaults = {\n\t datastore: \"local\",\n\t connection: null,\n\t id: \"\",\n\t rejectOnDrop: true,\n\t trackDoneStatus: false,\n\t Promise: Promise\n\t };\n\n\t Bottleneck.prototype.stopDefaults = {\n\t enqueueErrorMessage: \"This limiter has been stopped and cannot accept new jobs.\",\n\t dropWaitingJobs: true,\n\t dropErrorMessage: \"This limiter has been stopped.\"\n\t };\n\n\t return Bottleneck;\n\n\t}).call(commonjsGlobal);\n\n\tvar Bottleneck_1 = Bottleneck;\n\n\tvar lib = Bottleneck_1;\n\n\treturn lib;\n\n})));\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next 
*/\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc 
= Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = 
buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? 
null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) 
signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","\"use strict\";\n\nvar punycode = require(\"punycode\");\nvar mappingTable = require(\"./lib/mappingTable.json\");\n\nvar PROCESSING_OPTIONS = {\n TRANSITIONAL: 0,\n NONTRANSITIONAL: 1\n};\n\nfunction 
normalize(str) { // fix bug in v8\n return str.split('\\u0000').map(function (s) { return s.normalize('NFC'); }).join('\\u0000');\n}\n\nfunction findStatus(val) {\n var start = 0;\n var end = mappingTable.length - 1;\n\n while (start <= end) {\n var mid = Math.floor((start + end) / 2);\n\n var target = mappingTable[mid];\n if (target[0][0] <= val && target[0][1] >= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n 
return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? 
v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: 
true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) ? 
undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction 
httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.setErrorMap = exports.overrideErrorMap = exports.defaultErrorMap = exports.ZodError = exports.quotelessJson = exports.ZodIssueCode = void 0;\nconst util_1 = require(\"./helpers/util\");\nexports.ZodIssueCode = util_1.util.arrayToEnum([\n \"invalid_type\",\n \"invalid_literal\",\n \"custom\",\n \"invalid_union\",\n \"invalid_union_discriminator\",\n \"invalid_enum_value\",\n \"unrecognized_keys\",\n \"invalid_arguments\",\n \"invalid_return_type\",\n \"invalid_date\",\n \"invalid_string\",\n \"too_small\",\n \"too_big\",\n \"invalid_intersection_types\",\n \"not_multiple_of\",\n]);\nconst quotelessJson = (obj) => {\n const json = JSON.stringify(obj, null, 2);\n return json.replace(/\"([^\"]+)\":/g, \"$1:\");\n};\nexports.quotelessJson = quotelessJson;\nclass ZodError extends Error {\n constructor(issues) {\n super();\n this.issues = [];\n this.addIssue = (sub) => {\n 
this.issues = [...this.issues, sub];\n };\n this.addIssues = (subs = []) => {\n this.issues = [...this.issues, ...subs];\n };\n const actualProto = new.target.prototype;\n if (Object.setPrototypeOf) {\n // eslint-disable-next-line ban/ban\n Object.setPrototypeOf(this, actualProto);\n }\n else {\n this.__proto__ = actualProto;\n }\n this.name = \"ZodError\";\n this.issues = issues;\n }\n get errors() {\n return this.issues;\n }\n format(_mapper) {\n const mapper = _mapper ||\n function (issue) {\n return issue.message;\n };\n const fieldErrors = { _errors: [] };\n const processError = (error) => {\n for (const issue of error.issues) {\n if (issue.code === \"invalid_union\") {\n issue.unionErrors.map(processError);\n }\n else if (issue.code === \"invalid_return_type\") {\n processError(issue.returnTypeError);\n }\n else if (issue.code === \"invalid_arguments\") {\n processError(issue.argumentsError);\n }\n else if (issue.path.length === 0) {\n fieldErrors._errors.push(mapper(issue));\n }\n else {\n let curr = fieldErrors;\n let i = 0;\n while (i < issue.path.length) {\n const el = issue.path[i];\n const terminal = i === issue.path.length - 1;\n if (!terminal) {\n curr[el] = curr[el] || { _errors: [] };\n // if (typeof el === \"string\") {\n // curr[el] = curr[el] || { _errors: [] };\n // } else if (typeof el === \"number\") {\n // const errorArray: any = [];\n // errorArray._errors = [];\n // curr[el] = curr[el] || errorArray;\n // }\n }\n else {\n curr[el] = curr[el] || { _errors: [] };\n curr[el]._errors.push(mapper(issue));\n }\n curr = curr[el];\n i++;\n }\n }\n }\n };\n processError(this);\n return fieldErrors;\n }\n toString() {\n return this.message;\n }\n get message() {\n return JSON.stringify(this.issues, null, 2);\n }\n get isEmpty() {\n return this.issues.length === 0;\n }\n flatten(mapper = (issue) => issue.message) {\n const fieldErrors = {};\n const formErrors = [];\n for (const sub of this.issues) {\n if (sub.path.length > 0) {\n fieldErrors[sub.path[0]] = fieldErrors[sub.path[0]] || [];\n fieldErrors[sub.path[0]].push(mapper(sub));\n }\n else {\n formErrors.push(mapper(sub));\n }\n }\n return { formErrors, fieldErrors };\n }\n get formErrors() {\n return this.flatten();\n }\n}\nexports.ZodError = ZodError;\nZodError.create = (issues) => {\n const error = new ZodError(issues);\n return error;\n};\nconst defaultErrorMap = (issue, _ctx) => {\n let message;\n switch (issue.code) {\n case exports.ZodIssueCode.invalid_type:\n if (issue.received === util_1.ZodParsedType.undefined) {\n message = \"Required\";\n }\n else {\n message = `Expected ${issue.expected}, received ${issue.received}`;\n }\n break;\n case exports.ZodIssueCode.invalid_literal:\n message = `Invalid literal value, expected ${JSON.stringify(issue.expected)}`;\n break;\n case exports.ZodIssueCode.unrecognized_keys:\n message = `Unrecognized key(s) in object: ${util_1.util.joinValues(issue.keys, \", \")}`;\n break;\n case exports.ZodIssueCode.invalid_union:\n message = `Invalid input`;\n break;\n case exports.ZodIssueCode.invalid_union_discriminator:\n message = `Invalid discriminator value. Expected ${util_1.util.joinValues(issue.options)}`;\n break;\n case exports.ZodIssueCode.invalid_enum_value:\n message = `Invalid enum value. 
Expected ${util_1.util.joinValues(issue.options)}, received '${issue.received}'`;\n break;\n case exports.ZodIssueCode.invalid_arguments:\n message = `Invalid function arguments`;\n break;\n case exports.ZodIssueCode.invalid_return_type:\n message = `Invalid function return type`;\n break;\n case exports.ZodIssueCode.invalid_date:\n message = `Invalid date`;\n break;\n case exports.ZodIssueCode.invalid_string:\n if (issue.validation !== \"regex\")\n message = `Invalid ${issue.validation}`;\n else\n message = \"Invalid\";\n break;\n case exports.ZodIssueCode.too_small:\n if (issue.type === \"array\")\n message = `Array must contain ${issue.inclusive ? `at least` : `more than`} ${issue.minimum} element(s)`;\n else if (issue.type === \"string\")\n message = `String must contain ${issue.inclusive ? `at least` : `over`} ${issue.minimum} character(s)`;\n else if (issue.type === \"number\")\n message = `Number must be greater than ${issue.inclusive ? `or equal to ` : ``}${issue.minimum}`;\n else\n message = \"Invalid input\";\n break;\n case exports.ZodIssueCode.too_big:\n if (issue.type === \"array\")\n message = `Array must contain ${issue.inclusive ? `at most` : `less than`} ${issue.maximum} element(s)`;\n else if (issue.type === \"string\")\n message = `String must contain ${issue.inclusive ? `at most` : `under`} ${issue.maximum} character(s)`;\n else if (issue.type === \"number\")\n message = `Number must be less than ${issue.inclusive ? `or equal to ` : ``}${issue.maximum}`;\n else\n message = \"Invalid input\";\n break;\n case exports.ZodIssueCode.custom:\n message = `Invalid input`;\n break;\n case exports.ZodIssueCode.invalid_intersection_types:\n message = `Intersection results could not be merged`;\n break;\n case exports.ZodIssueCode.not_multiple_of:\n message = `Number must be a multiple of ${issue.multipleOf}`;\n break;\n default:\n message = _ctx.defaultError;\n util_1.util.assertNever(issue);\n }\n return { message };\n};\nexports.defaultErrorMap = defaultErrorMap;\nexports.overrideErrorMap = exports.defaultErrorMap;\nconst setErrorMap = (map) => {\n exports.overrideErrorMap = map;\n};\nexports.setErrorMap = setErrorMap;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ZodParsedType = exports.getParsedType = void 0;\n__exportStar(require(\"./helpers/parseUtil\"), exports);\n__exportStar(require(\"./helpers/typeAliases\"), exports);\nvar util_1 = require(\"./helpers/util\");\nObject.defineProperty(exports, \"getParsedType\", { enumerable: true, get: function () { return util_1.getParsedType; } });\nObject.defineProperty(exports, \"ZodParsedType\", { enumerable: true, get: function () { return util_1.ZodParsedType; } });\n__exportStar(require(\"./types\"), exports);\n__exportStar(require(\"./ZodError\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.errorUtil = void 0;\nvar errorUtil;\n(function (errorUtil) {\n errorUtil.errToObj = (message) => typeof message === \"string\" ? 
{ message } : message || {};\n errorUtil.toString = (message) => typeof message === \"string\" ? message : message === null || message === void 0 ? void 0 : message.message;\n})(errorUtil = exports.errorUtil || (exports.errorUtil = {}));\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isAsync = exports.isValid = exports.isDirty = exports.isAborted = exports.OK = exports.DIRTY = exports.INVALID = exports.ParseStatus = exports.addIssueToContext = exports.EMPTY_PATH = exports.makeIssue = void 0;\nconst ZodError_1 = require(\"../ZodError\");\nconst makeIssue = (params) => {\n const { data, path, errorMaps, issueData } = params;\n const fullPath = [...path, ...(issueData.path || [])];\n const fullIssue = {\n ...issueData,\n path: fullPath,\n };\n let errorMessage = \"\";\n const maps = errorMaps\n .filter((m) => !!m)\n .slice()\n .reverse();\n for (const map of maps) {\n errorMessage = map(fullIssue, { data, defaultError: errorMessage }).message;\n }\n return {\n ...issueData,\n path: fullPath,\n message: issueData.message || errorMessage,\n };\n};\nexports.makeIssue = makeIssue;\nexports.EMPTY_PATH = [];\nfunction addIssueToContext(ctx, issueData) {\n const issue = exports.makeIssue({\n issueData: issueData,\n data: ctx.data,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n ZodError_1.overrideErrorMap,\n ZodError_1.defaultErrorMap,\n ].filter((x) => !!x),\n });\n ctx.common.issues.push(issue);\n}\nexports.addIssueToContext = addIssueToContext;\nclass ParseStatus {\n constructor() {\n this.value = \"valid\";\n }\n dirty() {\n if (this.value === \"valid\")\n this.value = \"dirty\";\n }\n abort() {\n if (this.value !== \"aborted\")\n this.value = \"aborted\";\n }\n static mergeArray(status, results) {\n const arrayValue = [];\n for (const s of results) {\n if (s.status === \"aborted\")\n return exports.INVALID;\n if (s.status === \"dirty\")\n status.dirty();\n arrayValue.push(s.value);\n }\n return { status: status.value, value: arrayValue };\n }\n static async mergeObjectAsync(status, pairs) {\n const syncPairs = [];\n for (const pair of pairs) {\n syncPairs.push({\n key: await pair.key,\n value: await pair.value,\n });\n }\n return ParseStatus.mergeObjectSync(status, syncPairs);\n }\n static mergeObjectSync(status, pairs) {\n const finalObject = {};\n for (const pair of pairs) {\n const { key, value } = pair;\n if (key.status === \"aborted\")\n return exports.INVALID;\n if (value.status === \"aborted\")\n return exports.INVALID;\n if (key.status === \"dirty\")\n status.dirty();\n if (value.status === \"dirty\")\n status.dirty();\n if (typeof value.value !== \"undefined\" || pair.alwaysSet) {\n finalObject[key.value] = value.value;\n }\n }\n return { status: status.value, value: finalObject };\n }\n}\nexports.ParseStatus = ParseStatus;\nexports.INVALID = Object.freeze({\n status: \"aborted\",\n});\nconst DIRTY = (value) => ({ status: \"dirty\", value });\nexports.DIRTY = DIRTY;\nconst OK = (value) => ({ status: \"valid\", value });\nexports.OK = OK;\nconst isAborted = (x) => x.status === \"aborted\";\nexports.isAborted = isAborted;\nconst isDirty = (x) => x.status === \"dirty\";\nexports.isDirty = isDirty;\nconst isValid = (x) => x.status === \"valid\";\nexports.isValid = isValid;\nconst isAsync = (x) => typeof Promise !== undefined && x instanceof Promise;\nexports.isAsync = isAsync;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n","\"use 
strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getParsedType = exports.ZodParsedType = exports.util = void 0;\nvar util;\n(function (util) {\n function assertNever(_x) {\n throw new Error();\n }\n util.assertNever = assertNever;\n util.arrayToEnum = (items) => {\n const obj = {};\n for (const item of items) {\n obj[item] = item;\n }\n return obj;\n };\n util.getValidEnumValues = (obj) => {\n const validKeys = util.objectKeys(obj).filter((k) => typeof obj[obj[k]] !== \"number\");\n const filtered = {};\n for (const k of validKeys) {\n filtered[k] = obj[k];\n }\n return util.objectValues(filtered);\n };\n util.objectValues = (obj) => {\n return util.objectKeys(obj).map(function (e) {\n return obj[e];\n });\n };\n util.objectKeys = typeof Object.keys === \"function\" // eslint-disable-line ban/ban\n ? (obj) => Object.keys(obj) // eslint-disable-line ban/ban\n : (object) => {\n const keys = [];\n for (const key in object) {\n if (Object.prototype.hasOwnProperty.call(object, key)) {\n keys.push(key);\n }\n }\n return keys;\n };\n util.find = (arr, checker) => {\n for (const item of arr) {\n if (checker(item))\n return item;\n }\n return undefined;\n };\n util.isInteger = typeof Number.isInteger === \"function\"\n ? (val) => Number.isInteger(val) // eslint-disable-line ban/ban\n : (val) => typeof val === \"number\" && isFinite(val) && Math.floor(val) === val;\n function joinValues(array, separator = \" | \") {\n return array\n .map((val) => (typeof val === \"string\" ? `'${val}'` : val))\n .join(separator);\n }\n util.joinValues = joinValues;\n})(util = exports.util || (exports.util = {}));\nexports.ZodParsedType = util.arrayToEnum([\n \"string\",\n \"nan\",\n \"number\",\n \"integer\",\n \"float\",\n \"boolean\",\n \"date\",\n \"bigint\",\n \"symbol\",\n \"function\",\n \"undefined\",\n \"null\",\n \"array\",\n \"object\",\n \"unknown\",\n \"promise\",\n \"void\",\n \"never\",\n \"map\",\n \"set\",\n]);\nconst getParsedType = (data) => {\n const t = typeof data;\n switch (t) {\n case \"undefined\":\n return exports.ZodParsedType.undefined;\n case \"string\":\n return exports.ZodParsedType.string;\n case \"number\":\n return isNaN(data) ? exports.ZodParsedType.nan : exports.ZodParsedType.number;\n case \"boolean\":\n return exports.ZodParsedType.boolean;\n case \"function\":\n return exports.ZodParsedType.function;\n case \"bigint\":\n return exports.ZodParsedType.bigint;\n case \"object\":\n if (Array.isArray(data)) {\n return exports.ZodParsedType.array;\n }\n if (data === null) {\n return exports.ZodParsedType.null;\n }\n if (data.then &&\n typeof data.then === \"function\" &&\n data.catch &&\n typeof data.catch === \"function\") {\n return exports.ZodParsedType.promise;\n }\n if (typeof Map !== \"undefined\" && data instanceof Map) {\n return exports.ZodParsedType.map;\n }\n if (typeof Set !== \"undefined\" && data instanceof Set) {\n return exports.ZodParsedType.set;\n }\n if (typeof Date !== \"undefined\" && data instanceof Date) {\n return exports.ZodParsedType.date;\n }\n return exports.ZodParsedType.object;\n default:\n return exports.ZodParsedType.unknown;\n }\n};\nexports.getParsedType = getParsedType;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.z = void 0;\nconst mod = __importStar(require(\"./external\"));\nexports.z = mod;\n__exportStar(require(\"./external\"), exports);\nexports.default = mod;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.intersection = exports.instanceof = exports.function = exports.enum = exports.effect = exports.discriminatedUnion = exports.date = exports.boolean = exports.bigint = exports.array = exports.any = exports.ZodFirstPartyTypeKind = exports.late = exports.ZodSchema = exports.Schema = exports.custom = exports.ZodNaN = exports.ZodDefault = exports.ZodNullable = exports.ZodOptional = exports.ZodTransformer = exports.ZodEffects = exports.ZodPromise = exports.ZodNativeEnum = exports.ZodEnum = exports.ZodLiteral = exports.ZodLazy = exports.ZodFunction = exports.ZodSet = exports.ZodMap = exports.ZodRecord = exports.ZodTuple = exports.ZodIntersection = exports.ZodDiscriminatedUnion = exports.ZodUnion = exports.ZodObject = exports.objectUtil = exports.ZodArray = exports.ZodVoid = exports.ZodNever = exports.ZodUnknown = exports.ZodAny = exports.ZodNull = exports.ZodUndefined = exports.ZodDate = exports.ZodBoolean = exports.ZodBigInt = exports.ZodNumber = exports.ZodString = exports.ZodType = void 0;\nexports.void = exports.unknown = exports.union = exports.undefined = exports.tuple = exports.transformer = exports.string = exports.strictObject = exports.set = exports.record = exports.promise = exports.preprocess = exports.ostring = exports.optional = exports.onumber = exports.oboolean = exports.object = exports.number = exports.nullable = exports.null = exports.never = exports.nativeEnum = exports.nan = exports.map = exports.literal = exports.lazy = void 0;\nconst errorUtil_1 = require(\"./helpers/errorUtil\");\nconst parseUtil_1 = require(\"./helpers/parseUtil\");\nconst util_1 = require(\"./helpers/util\");\nconst ZodError_1 = require(\"./ZodError\");\nclass ParseInputLazyPath {\n constructor(parent, value, path, key) {\n this.parent = parent;\n this.data = value;\n this._path = path;\n this._key = key;\n }\n get path() {\n return this._path.concat(this._key);\n }\n}\nconst handleResult = (ctx, result) => {\n if (parseUtil_1.isValid(result)) {\n return { success: true, data: result.value };\n }\n else {\n if (!ctx.common.issues.length) {\n throw new Error(\"Validation failed but no issues detected.\");\n }\n const error = new ZodError_1.ZodError(ctx.common.issues);\n return { success: false, error };\n }\n};\nfunction 
processCreateParams(params) {\n if (!params)\n return {};\n const { errorMap, invalid_type_error, required_error, description } = params;\n if (errorMap && (invalid_type_error || required_error)) {\n throw new Error(`Can't use \"invalid\" or \"required\" in conjunction with custom error map.`);\n }\n if (errorMap)\n return { errorMap: errorMap, description };\n const customMap = (iss, ctx) => {\n if (iss.code !== \"invalid_type\")\n return { message: ctx.defaultError };\n if (typeof ctx.data === \"undefined\" && required_error)\n return { message: required_error };\n if (params.invalid_type_error)\n return { message: params.invalid_type_error };\n return { message: ctx.defaultError };\n };\n return { errorMap: customMap, description };\n}\nclass ZodType {\n constructor(def) {\n /** Alias of safeParseAsync */\n this.spa = this.safeParseAsync;\n this.superRefine = this._refinement;\n this._def = def;\n this.parse = this.parse.bind(this);\n this.safeParse = this.safeParse.bind(this);\n this.parseAsync = this.parseAsync.bind(this);\n this.safeParseAsync = this.safeParseAsync.bind(this);\n this.spa = this.spa.bind(this);\n this.refine = this.refine.bind(this);\n this.refinement = this.refinement.bind(this);\n this.superRefine = this.superRefine.bind(this);\n this.optional = this.optional.bind(this);\n this.nullable = this.nullable.bind(this);\n this.nullish = this.nullish.bind(this);\n this.array = this.array.bind(this);\n this.promise = this.promise.bind(this);\n this.or = this.or.bind(this);\n this.and = this.and.bind(this);\n this.transform = this.transform.bind(this);\n this.default = this.default.bind(this);\n this.describe = this.describe.bind(this);\n this.isNullable = this.isNullable.bind(this);\n this.isOptional = this.isOptional.bind(this);\n }\n get description() {\n return this._def.description;\n }\n _getType(input) {\n return util_1.getParsedType(input.data);\n }\n _getOrReturnCtx(input, ctx) {\n return (ctx || {\n common: input.parent.common,\n data: input.data,\n parsedType: util_1.getParsedType(input.data),\n schemaErrorMap: this._def.errorMap,\n path: input.path,\n parent: input.parent,\n });\n }\n _processInputParams(input) {\n return {\n status: new parseUtil_1.ParseStatus(),\n ctx: {\n common: input.parent.common,\n data: input.data,\n parsedType: util_1.getParsedType(input.data),\n schemaErrorMap: this._def.errorMap,\n path: input.path,\n parent: input.parent,\n },\n };\n }\n _parseSync(input) {\n const result = this._parse(input);\n if (parseUtil_1.isAsync(result)) {\n throw new Error(\"Synchronous parse encountered promise.\");\n }\n return result;\n }\n _parseAsync(input) {\n const result = this._parse(input);\n return Promise.resolve(result);\n }\n parse(data, params) {\n const result = this.safeParse(data, params);\n if (result.success)\n return result.data;\n throw result.error;\n }\n safeParse(data, params) {\n var _a;\n const ctx = {\n common: {\n issues: [],\n async: (_a = params === null || params === void 0 ? void 0 : params.async) !== null && _a !== void 0 ? _a : false,\n contextualErrorMap: params === null || params === void 0 ? void 0 : params.errorMap,\n },\n path: (params === null || params === void 0 ? 
void 0 : params.path) || [],\n schemaErrorMap: this._def.errorMap,\n parent: null,\n data,\n parsedType: util_1.getParsedType(data),\n };\n const result = this._parseSync({ data, path: ctx.path, parent: ctx });\n return handleResult(ctx, result);\n }\n async parseAsync(data, params) {\n const result = await this.safeParseAsync(data, params);\n if (result.success)\n return result.data;\n throw result.error;\n }\n async safeParseAsync(data, params) {\n const ctx = {\n common: {\n issues: [],\n contextualErrorMap: params === null || params === void 0 ? void 0 : params.errorMap,\n async: true,\n },\n path: (params === null || params === void 0 ? void 0 : params.path) || [],\n schemaErrorMap: this._def.errorMap,\n parent: null,\n data,\n parsedType: util_1.getParsedType(data),\n };\n const maybeAsyncResult = this._parse({ data, path: [], parent: ctx });\n const result = await (parseUtil_1.isAsync(maybeAsyncResult)\n ? maybeAsyncResult\n : Promise.resolve(maybeAsyncResult));\n return handleResult(ctx, result);\n }\n refine(check, message) {\n const getIssueProperties = (val) => {\n if (typeof message === \"string\" || typeof message === \"undefined\") {\n return { message };\n }\n else if (typeof message === \"function\") {\n return message(val);\n }\n else {\n return message;\n }\n };\n return this._refinement((val, ctx) => {\n const result = check(val);\n const setError = () => ctx.addIssue({\n code: ZodError_1.ZodIssueCode.custom,\n ...getIssueProperties(val),\n });\n if (typeof Promise !== \"undefined\" && result instanceof Promise) {\n return result.then((data) => {\n if (!data) {\n setError();\n return false;\n }\n else {\n return true;\n }\n });\n }\n if (!result) {\n setError();\n return false;\n }\n else {\n return true;\n }\n });\n }\n refinement(check, refinementData) {\n return this._refinement((val, ctx) => {\n if (!check(val)) {\n ctx.addIssue(typeof refinementData === \"function\"\n ? refinementData(val, ctx)\n : refinementData);\n return false;\n }\n else {\n return true;\n }\n });\n }\n _refinement(refinement) {\n return new ZodEffects({\n schema: this,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect: { type: \"refinement\", refinement },\n });\n }\n optional() {\n return ZodOptional.create(this);\n }\n nullable() {\n return ZodNullable.create(this);\n }\n nullish() {\n return this.optional().nullable();\n }\n array() {\n return ZodArray.create(this);\n }\n promise() {\n return ZodPromise.create(this);\n }\n or(option) {\n return ZodUnion.create([this, option]);\n }\n and(incoming) {\n return ZodIntersection.create(this, incoming);\n }\n transform(transform) {\n return new ZodEffects({\n schema: this,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect: { type: \"transform\", transform },\n });\n }\n default(def) {\n const defaultValueFunc = typeof def === \"function\" ? 
def : () => def;\n return new ZodDefault({\n innerType: this,\n defaultValue: defaultValueFunc,\n typeName: ZodFirstPartyTypeKind.ZodDefault,\n });\n }\n describe(description) {\n const This = this.constructor;\n return new This({\n ...this._def,\n description,\n });\n }\n isOptional() {\n return this.safeParse(undefined).success;\n }\n isNullable() {\n return this.safeParse(null).success;\n }\n}\nexports.ZodType = ZodType;\nexports.Schema = ZodType;\nexports.ZodSchema = ZodType;\nconst cuidRegex = /^c[^\\s-]{8,}$/i;\nconst uuidRegex = /^([a-f0-9]{8}-[a-f0-9]{4}-[1-5][a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12}|00000000-0000-0000-0000-000000000000)$/i;\n// from https://stackoverflow.com/a/46181/1550155\n// old version: too slow, didn't support unicode\n// const emailRegex = /^((([a-z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])+(\\.([a-z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])|(\\\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-z]|\\d|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])|(([a-z]|\\d|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])([a-z]|\\d|-|\\.|_|~|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])*([a-z]|\\d|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])))\\.)+(([a-z]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])|(([a-z]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])([a-z]|\\d|-|\\.|_|~|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])*([a-z]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])))$/i;\n// eslint-disable-next-line\nconst emailRegex = /^(([^<>()[\\]\\.,;:\\s@\\\"]+(\\.[^<>()[\\]\\.,;:\\s@\\\"]+)*)|(\\\".+\\\"))@(([^<>()[\\]\\.,;:\\s@\\\"]+\\.)+[^<>()[\\]\\.,;:\\s@\\\"]{2,})$/i;\nclass ZodString extends ZodType {\n constructor() {\n super(...arguments);\n this._regex = (regex, validation, message) => this.refinement((data) => regex.test(data), {\n validation,\n code: ZodError_1.ZodIssueCode.invalid_string,\n ...errorUtil_1.errorUtil.errToObj(message),\n });\n /**\n * @deprecated Use z.string().min(1) instead.\n * @see {@link ZodString.min}\n */\n this.nonempty = (message) => this.min(1, errorUtil_1.errorUtil.errToObj(message));\n this.trim = () => new ZodString({\n ...this._def,\n checks: [...this._def.checks, { kind: \"trim\" }],\n });\n }\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.string) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.string,\n received: ctx.parsedType,\n }\n //\n );\n return parseUtil_1.INVALID;\n }\n const status = new parseUtil_1.ParseStatus();\n let ctx = undefined;\n for (const check of this._def.checks) {\n if (check.kind === \"min\") {\n if (input.data.length < check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: check.value,\n type: \"string\",\n inclusive: true,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"max\") {\n if (input.data.length > check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: 
ZodError_1.ZodIssueCode.too_big,\n maximum: check.value,\n type: \"string\",\n inclusive: true,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"email\") {\n if (!emailRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"email\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"uuid\") {\n if (!uuidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"uuid\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"cuid\") {\n if (!cuidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"cuid\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"url\") {\n try {\n new URL(input.data);\n }\n catch (_a) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"url\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"regex\") {\n check.regex.lastIndex = 0;\n const testResult = check.regex.test(input.data);\n if (!testResult) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"regex\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"trim\") {\n input.data = input.data.trim();\n }\n else {\n util_1.util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n _addCheck(check) {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, check],\n });\n }\n email(message) {\n return this._addCheck({ kind: \"email\", ...errorUtil_1.errorUtil.errToObj(message) });\n }\n url(message) {\n return this._addCheck({ kind: \"url\", ...errorUtil_1.errorUtil.errToObj(message) });\n }\n uuid(message) {\n return this._addCheck({ kind: \"uuid\", ...errorUtil_1.errorUtil.errToObj(message) });\n }\n cuid(message) {\n return this._addCheck({ kind: \"cuid\", ...errorUtil_1.errorUtil.errToObj(message) });\n }\n regex(regex, message) {\n return this._addCheck({\n kind: \"regex\",\n regex: regex,\n ...errorUtil_1.errorUtil.errToObj(message),\n });\n }\n min(minLength, message) {\n return this._addCheck({\n kind: \"min\",\n value: minLength,\n ...errorUtil_1.errorUtil.errToObj(message),\n });\n }\n max(maxLength, message) {\n return this._addCheck({\n kind: \"max\",\n value: maxLength,\n ...errorUtil_1.errorUtil.errToObj(message),\n });\n }\n length(len, message) {\n return this.min(len, message).max(len, message);\n }\n get isEmail() {\n return !!this._def.checks.find((ch) => ch.kind === \"email\");\n }\n get isURL() {\n return !!this._def.checks.find((ch) => ch.kind === \"url\");\n }\n get isUUID() {\n return !!this._def.checks.find((ch) => ch.kind === \"uuid\");\n }\n get isCUID() {\n return !!this._def.checks.find((ch) => ch.kind === \"cuid\");\n }\n get minLength() {\n let min = -Infinity;\n this._def.checks.map((ch) => {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min) {\n min = ch.value;\n }\n }\n });\n return min;\n }\n get maxLength() {\n let max = null;\n this._def.checks.map((ch) => {\n if (ch.kind === \"max\") 
{\n if (max === null || ch.value < max) {\n max = ch.value;\n }\n }\n });\n return max;\n }\n}\nexports.ZodString = ZodString;\nZodString.create = (params) => {\n return new ZodString({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodString,\n ...processCreateParams(params),\n });\n};\n// https://stackoverflow.com/questions/3966484/why-does-modulus-operator-return-fractional-number-in-javascript/31711034#31711034\nfunction floatSafeRemainder(val, step) {\n const valDecCount = (val.toString().split(\".\")[1] || \"\").length;\n const stepDecCount = (step.toString().split(\".\")[1] || \"\").length;\n const decCount = valDecCount > stepDecCount ? valDecCount : stepDecCount;\n const valInt = parseInt(val.toFixed(decCount).replace(\".\", \"\"));\n const stepInt = parseInt(step.toFixed(decCount).replace(\".\", \"\"));\n return (valInt % stepInt) / Math.pow(10, decCount);\n}\nclass ZodNumber extends ZodType {\n constructor() {\n super(...arguments);\n this.min = this.gte;\n this.max = this.lte;\n this.step = this.multipleOf;\n }\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.number) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.number,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n let ctx = undefined;\n const status = new parseUtil_1.ParseStatus();\n for (const check of this._def.checks) {\n if (check.kind === \"int\") {\n if (!util_1.util.isInteger(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: \"integer\",\n received: \"float\",\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"min\") {\n const tooSmall = check.inclusive\n ? input.data < check.value\n : input.data <= check.value;\n if (tooSmall) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: check.value,\n type: \"number\",\n inclusive: check.inclusive,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"max\") {\n const tooBig = check.inclusive\n ? 
input.data > check.value\n : input.data >= check.value;\n if (tooBig) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_big,\n maximum: check.value,\n type: \"number\",\n inclusive: check.inclusive,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"multipleOf\") {\n if (floatSafeRemainder(input.data, check.value) !== 0) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.not_multiple_of,\n multipleOf: check.value,\n message: check.message,\n });\n status.dirty();\n }\n }\n else {\n util_1.util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n gte(value, message) {\n return this.setLimit(\"min\", value, true, errorUtil_1.errorUtil.toString(message));\n }\n gt(value, message) {\n return this.setLimit(\"min\", value, false, errorUtil_1.errorUtil.toString(message));\n }\n lte(value, message) {\n return this.setLimit(\"max\", value, true, errorUtil_1.errorUtil.toString(message));\n }\n lt(value, message) {\n return this.setLimit(\"max\", value, false, errorUtil_1.errorUtil.toString(message));\n }\n setLimit(kind, value, inclusive, message) {\n return new ZodNumber({\n ...this._def,\n checks: [\n ...this._def.checks,\n {\n kind,\n value,\n inclusive,\n message: errorUtil_1.errorUtil.toString(message),\n },\n ],\n });\n }\n _addCheck(check) {\n return new ZodNumber({\n ...this._def,\n checks: [...this._def.checks, check],\n });\n }\n int(message) {\n return this._addCheck({\n kind: \"int\",\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n positive(message) {\n return this._addCheck({\n kind: \"min\",\n value: 0,\n inclusive: false,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n negative(message) {\n return this._addCheck({\n kind: \"max\",\n value: 0,\n inclusive: false,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n nonpositive(message) {\n return this._addCheck({\n kind: \"max\",\n value: 0,\n inclusive: true,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n nonnegative(message) {\n return this._addCheck({\n kind: \"min\",\n value: 0,\n inclusive: true,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n multipleOf(value, message) {\n return this._addCheck({\n kind: \"multipleOf\",\n value: value,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n get minValue() {\n let min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min;\n }\n get maxValue() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max;\n }\n get isInt() {\n return !!this._def.checks.find((ch) => ch.kind === \"int\");\n }\n}\nexports.ZodNumber = ZodNumber;\nZodNumber.create = (params) => {\n return new ZodNumber({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodNumber,\n ...processCreateParams(params),\n });\n};\nclass ZodBigInt extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.bigint) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.bigint,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n 
}\n}\nexports.ZodBigInt = ZodBigInt;\nZodBigInt.create = (params) => {\n return new ZodBigInt({\n typeName: ZodFirstPartyTypeKind.ZodBigInt,\n ...processCreateParams(params),\n });\n};\nclass ZodBoolean extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.boolean) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.boolean,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodBoolean = ZodBoolean;\nZodBoolean.create = (params) => {\n return new ZodBoolean({\n typeName: ZodFirstPartyTypeKind.ZodBoolean,\n ...processCreateParams(params),\n });\n};\nclass ZodDate extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.date) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.date,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n if (isNaN(input.data.getTime())) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_date,\n });\n return parseUtil_1.INVALID;\n }\n return {\n status: \"valid\",\n value: new Date(input.data.getTime()),\n };\n }\n}\nexports.ZodDate = ZodDate;\nZodDate.create = (params) => {\n return new ZodDate({\n typeName: ZodFirstPartyTypeKind.ZodDate,\n ...processCreateParams(params),\n });\n};\nclass ZodUndefined extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.undefined) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.undefined,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodUndefined = ZodUndefined;\nZodUndefined.create = (params) => {\n return new ZodUndefined({\n typeName: ZodFirstPartyTypeKind.ZodUndefined,\n ...processCreateParams(params),\n });\n};\nclass ZodNull extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.null) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.null,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodNull = ZodNull;\nZodNull.create = (params) => {\n return new ZodNull({\n typeName: ZodFirstPartyTypeKind.ZodNull,\n ...processCreateParams(params),\n });\n};\nclass ZodAny extends ZodType {\n constructor() {\n super(...arguments);\n // to prevent instances of other classes from extending ZodAny. 
this causes issues with catchall in ZodObject.\n this._any = true;\n }\n _parse(input) {\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodAny = ZodAny;\nZodAny.create = (params) => {\n return new ZodAny({\n typeName: ZodFirstPartyTypeKind.ZodAny,\n ...processCreateParams(params),\n });\n};\nclass ZodUnknown extends ZodType {\n constructor() {\n super(...arguments);\n // required\n this._unknown = true;\n }\n _parse(input) {\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodUnknown = ZodUnknown;\nZodUnknown.create = (params) => {\n return new ZodUnknown({\n typeName: ZodFirstPartyTypeKind.ZodUnknown,\n ...processCreateParams(params),\n });\n};\nclass ZodNever extends ZodType {\n _parse(input) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.never,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n}\nexports.ZodNever = ZodNever;\nZodNever.create = (params) => {\n return new ZodNever({\n typeName: ZodFirstPartyTypeKind.ZodNever,\n ...processCreateParams(params),\n });\n};\nclass ZodVoid extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.undefined) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.void,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodVoid = ZodVoid;\nZodVoid.create = (params) => {\n return new ZodVoid({\n typeName: ZodFirstPartyTypeKind.ZodVoid,\n ...processCreateParams(params),\n });\n};\nclass ZodArray extends ZodType {\n _parse(input) {\n const { ctx, status } = this._processInputParams(input);\n const def = this._def;\n if (ctx.parsedType !== util_1.ZodParsedType.array) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.array,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n if (def.minLength !== null) {\n if (ctx.data.length < def.minLength.value) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: def.minLength.value,\n type: \"array\",\n inclusive: true,\n message: def.minLength.message,\n });\n status.dirty();\n }\n }\n if (def.maxLength !== null) {\n if (ctx.data.length > def.maxLength.value) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_big,\n maximum: def.maxLength.value,\n type: \"array\",\n inclusive: true,\n message: def.maxLength.message,\n });\n status.dirty();\n }\n }\n if (ctx.common.async) {\n return Promise.all(ctx.data.map((item, i) => {\n return def.type._parseAsync(new ParseInputLazyPath(ctx, item, ctx.path, i));\n })).then((result) => {\n return parseUtil_1.ParseStatus.mergeArray(status, result);\n });\n }\n const result = ctx.data.map((item, i) => {\n return def.type._parseSync(new ParseInputLazyPath(ctx, item, ctx.path, i));\n });\n return parseUtil_1.ParseStatus.mergeArray(status, result);\n }\n get element() {\n return this._def.type;\n }\n min(minLength, message) {\n return new ZodArray({\n ...this._def,\n minLength: { value: minLength, message: errorUtil_1.errorUtil.toString(message) },\n });\n }\n max(maxLength, message) {\n return new ZodArray({\n ...this._def,\n maxLength: { value: maxLength, message: errorUtil_1.errorUtil.toString(message) },\n });\n }\n length(len, message) {\n return 
this.min(len, message).max(len, message);\n }\n nonempty(message) {\n return this.min(1, message);\n }\n}\nexports.ZodArray = ZodArray;\nZodArray.create = (schema, params) => {\n return new ZodArray({\n type: schema,\n minLength: null,\n maxLength: null,\n typeName: ZodFirstPartyTypeKind.ZodArray,\n ...processCreateParams(params),\n });\n};\n/////////////////////////////////////////\n/////////////////////////////////////////\n////////// //////////\n////////// ZodObject //////////\n////////// //////////\n/////////////////////////////////////////\n/////////////////////////////////////////\nvar objectUtil;\n(function (objectUtil) {\n objectUtil.mergeShapes = (first, second) => {\n return {\n ...first,\n ...second,\n };\n };\n})(objectUtil = exports.objectUtil || (exports.objectUtil = {}));\nconst AugmentFactory = (def) => (augmentation) => {\n return new ZodObject({\n ...def,\n shape: () => ({\n ...def.shape(),\n ...augmentation,\n }),\n });\n};\nfunction deepPartialify(schema) {\n if (schema instanceof ZodObject) {\n const newShape = {};\n for (const key in schema.shape) {\n const fieldSchema = schema.shape[key];\n newShape[key] = ZodOptional.create(deepPartialify(fieldSchema));\n }\n return new ZodObject({\n ...schema._def,\n shape: () => newShape,\n });\n }\n else if (schema instanceof ZodArray) {\n return ZodArray.create(deepPartialify(schema.element));\n }\n else if (schema instanceof ZodOptional) {\n return ZodOptional.create(deepPartialify(schema.unwrap()));\n }\n else if (schema instanceof ZodNullable) {\n return ZodNullable.create(deepPartialify(schema.unwrap()));\n }\n else if (schema instanceof ZodTuple) {\n return ZodTuple.create(schema.items.map((item) => deepPartialify(item)));\n }\n else {\n return schema;\n }\n}\nclass ZodObject extends ZodType {\n constructor() {\n super(...arguments);\n this._cached = null;\n /**\n * @deprecated In most cases, this is no longer needed - unknown properties are now silently stripped.\n * If you want to pass through unknown properties, use `.passthrough()` instead.\n */\n this.nonstrict = this.passthrough;\n this.augment = AugmentFactory(this._def);\n this.extend = AugmentFactory(this._def);\n }\n _getCached() {\n if (this._cached !== null)\n return this._cached;\n const shape = this._def.shape();\n const keys = util_1.util.objectKeys(shape);\n return (this._cached = { shape, keys });\n }\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.object) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.object,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const { status, ctx } = this._processInputParams(input);\n const { shape, keys: shapeKeys } = this._getCached();\n const extraKeys = [];\n for (const key in ctx.data) {\n if (!shapeKeys.includes(key)) {\n extraKeys.push(key);\n }\n }\n const pairs = [];\n for (const key of shapeKeys) {\n const keyValidator = shape[key];\n const value = ctx.data[key];\n pairs.push({\n key: { status: \"valid\", value: key },\n value: keyValidator._parse(new ParseInputLazyPath(ctx, value, ctx.path, key)),\n alwaysSet: key in ctx.data,\n });\n }\n if (this._def.catchall instanceof ZodNever) {\n const unknownKeys = this._def.unknownKeys;\n if (unknownKeys === \"passthrough\") {\n for (const key of extraKeys) {\n pairs.push({\n key: { status: \"valid\", value: key },\n value: { status: \"valid\", value: ctx.data[key] },\n });\n }\n }\n 
else if (unknownKeys === \"strict\") {\n if (extraKeys.length > 0) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.unrecognized_keys,\n keys: extraKeys,\n });\n status.dirty();\n }\n }\n else if (unknownKeys === \"strip\") {\n }\n else {\n throw new Error(`Internal ZodObject error: invalid unknownKeys value.`);\n }\n }\n else {\n // run catchall validation\n const catchall = this._def.catchall;\n for (const key of extraKeys) {\n const value = ctx.data[key];\n pairs.push({\n key: { status: \"valid\", value: key },\n value: catchall._parse(new ParseInputLazyPath(ctx, value, ctx.path, key) //, ctx.child(key), value, getParsedType(value)\n ),\n alwaysSet: key in ctx.data,\n });\n }\n }\n if (ctx.common.async) {\n return Promise.resolve()\n .then(async () => {\n const syncPairs = [];\n for (const pair of pairs) {\n const key = await pair.key;\n syncPairs.push({\n key,\n value: await pair.value,\n alwaysSet: pair.alwaysSet,\n });\n }\n return syncPairs;\n })\n .then((syncPairs) => {\n return parseUtil_1.ParseStatus.mergeObjectSync(status, syncPairs);\n });\n }\n else {\n return parseUtil_1.ParseStatus.mergeObjectSync(status, pairs);\n }\n }\n get shape() {\n return this._def.shape();\n }\n strict(message) {\n errorUtil_1.errorUtil.errToObj;\n return new ZodObject({\n ...this._def,\n unknownKeys: \"strict\",\n ...(message !== undefined\n ? {\n errorMap: (issue, ctx) => {\n var _a, _b, _c, _d;\n const defaultError = (_c = (_b = (_a = this._def).errorMap) === null || _b === void 0 ? void 0 : _b.call(_a, issue, ctx).message) !== null && _c !== void 0 ? _c : ctx.defaultError;\n if (issue.code === \"unrecognized_keys\")\n return {\n message: (_d = errorUtil_1.errorUtil.errToObj(message).message) !== null && _d !== void 0 ? _d : defaultError,\n };\n return {\n message: defaultError,\n };\n },\n }\n : {}),\n });\n }\n strip() {\n return new ZodObject({\n ...this._def,\n unknownKeys: \"strip\",\n });\n }\n passthrough() {\n return new ZodObject({\n ...this._def,\n unknownKeys: \"passthrough\",\n });\n }\n setKey(key, schema) {\n return this.augment({ [key]: schema });\n }\n /**\n * Prior to zod@1.0.12 there was a bug in the\n * inferred type of merged objects. 
Please\n * upgrade if you are experiencing issues.\n */\n merge(merging) {\n // const mergedShape = objectUtil.mergeShapes(\n // this._def.shape(),\n // merging._def.shape()\n // );\n const merged = new ZodObject({\n unknownKeys: merging._def.unknownKeys,\n catchall: merging._def.catchall,\n shape: () => objectUtil.mergeShapes(this._def.shape(), merging._def.shape()),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n });\n return merged;\n }\n catchall(index) {\n return new ZodObject({\n ...this._def,\n catchall: index,\n });\n }\n pick(mask) {\n const shape = {};\n util_1.util.objectKeys(mask).map((key) => {\n // only add to shape if key corresponds to an element of the current shape\n if (this.shape[key])\n shape[key] = this.shape[key];\n });\n return new ZodObject({\n ...this._def,\n shape: () => shape,\n });\n }\n omit(mask) {\n const shape = {};\n util_1.util.objectKeys(this.shape).map((key) => {\n if (util_1.util.objectKeys(mask).indexOf(key) === -1) {\n shape[key] = this.shape[key];\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => shape,\n });\n }\n deepPartial() {\n return deepPartialify(this);\n }\n partial(mask) {\n const newShape = {};\n if (mask) {\n util_1.util.objectKeys(this.shape).map((key) => {\n if (util_1.util.objectKeys(mask).indexOf(key) === -1) {\n newShape[key] = this.shape[key];\n }\n else {\n newShape[key] = this.shape[key].optional();\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => newShape,\n });\n }\n else {\n for (const key in this.shape) {\n const fieldSchema = this.shape[key];\n newShape[key] = fieldSchema.optional();\n }\n }\n return new ZodObject({\n ...this._def,\n shape: () => newShape,\n });\n }\n required() {\n const newShape = {};\n for (const key in this.shape) {\n const fieldSchema = this.shape[key];\n let newField = fieldSchema;\n while (newField instanceof ZodOptional) {\n newField = newField._def.innerType;\n }\n newShape[key] = newField;\n }\n return new ZodObject({\n ...this._def,\n shape: () => newShape,\n });\n }\n}\nexports.ZodObject = ZodObject;\nZodObject.create = (shape, params) => {\n return new ZodObject({\n shape: () => shape,\n unknownKeys: \"strip\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params),\n });\n};\nZodObject.strictCreate = (shape, params) => {\n return new ZodObject({\n shape: () => shape,\n unknownKeys: \"strict\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params),\n });\n};\nZodObject.lazycreate = (shape, params) => {\n return new ZodObject({\n shape,\n unknownKeys: \"strip\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params),\n });\n};\nclass ZodUnion extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const options = this._def.options;\n function handleResults(results) {\n // return first issue-free validation if it exists\n for (const result of results) {\n if (result.result.status === \"valid\") {\n return result.result;\n }\n }\n for (const result of results) {\n if (result.result.status === \"dirty\") {\n // add issues from dirty option\n ctx.common.issues.push(...result.ctx.common.issues);\n return result.result;\n }\n }\n // return invalid\n const unionErrors = results.map((result) => new ZodError_1.ZodError(result.ctx.common.issues));\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_union,\n unionErrors,\n });\n return 
parseUtil_1.INVALID;\n }\n if (ctx.common.async) {\n return Promise.all(options.map(async (option) => {\n const childCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: [],\n },\n parent: null,\n };\n return {\n result: await option._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: childCtx,\n }),\n ctx: childCtx,\n };\n })).then(handleResults);\n }\n else {\n let dirty = undefined;\n const issues = [];\n for (const option of options) {\n const childCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: [],\n },\n parent: null,\n };\n const result = option._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: childCtx,\n });\n if (result.status === \"valid\") {\n return result;\n }\n else if (result.status === \"dirty\" && !dirty) {\n dirty = { result, ctx: childCtx };\n }\n if (childCtx.common.issues.length) {\n issues.push(childCtx.common.issues);\n }\n }\n if (dirty) {\n ctx.common.issues.push(...dirty.ctx.common.issues);\n return dirty.result;\n }\n const unionErrors = issues.map((issues) => new ZodError_1.ZodError(issues));\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_union,\n unionErrors,\n });\n return parseUtil_1.INVALID;\n }\n }\n get options() {\n return this._def.options;\n }\n}\nexports.ZodUnion = ZodUnion;\nZodUnion.create = (types, params) => {\n return new ZodUnion({\n options: types,\n typeName: ZodFirstPartyTypeKind.ZodUnion,\n ...processCreateParams(params),\n });\n};\nclass ZodDiscriminatedUnion extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.object) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.object,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const discriminator = this.discriminator;\n const discriminatorValue = ctx.data[discriminator];\n const option = this.options.get(discriminatorValue);\n if (!option) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_union_discriminator,\n options: this.validDiscriminatorValues,\n path: [discriminator],\n });\n return parseUtil_1.INVALID;\n }\n if (ctx.common.async) {\n return option._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n }\n else {\n return option._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n }\n }\n get discriminator() {\n return this._def.discriminator;\n }\n get validDiscriminatorValues() {\n return Array.from(this.options.keys());\n }\n get options() {\n return this._def.options;\n }\n /**\n * The constructor of the discriminated union schema. Its behaviour is very similar to that of the normal z.union() constructor.\n * However, it only allows a union of objects, all of which need to share a discriminator property. 
This property must\n * have a different value for each object in the union.\n * @param discriminator the name of the discriminator property\n * @param types an array of object schemas\n * @param params\n */\n static create(discriminator, types, params) {\n // Get all the valid discriminator values\n const options = new Map();\n try {\n types.forEach((type) => {\n const discriminatorValue = type.shape[discriminator].value;\n options.set(discriminatorValue, type);\n });\n }\n catch (e) {\n throw new Error(\"The discriminator value could not be extracted from all the provided schemas\");\n }\n // Assert that all the discriminator values are unique\n if (options.size !== types.length) {\n throw new Error(\"Some of the discriminator values are not unique\");\n }\n return new ZodDiscriminatedUnion({\n typeName: ZodFirstPartyTypeKind.ZodDiscriminatedUnion,\n discriminator,\n options,\n ...processCreateParams(params),\n });\n }\n}\nexports.ZodDiscriminatedUnion = ZodDiscriminatedUnion;\nfunction mergeValues(a, b) {\n const aType = util_1.getParsedType(a);\n const bType = util_1.getParsedType(b);\n if (a === b) {\n return { valid: true, data: a };\n }\n else if (aType === util_1.ZodParsedType.object && bType === util_1.ZodParsedType.object) {\n const bKeys = util_1.util.objectKeys(b);\n const sharedKeys = util_1.util\n .objectKeys(a)\n .filter((key) => bKeys.indexOf(key) !== -1);\n const newObj = { ...a, ...b };\n for (const key of sharedKeys) {\n const sharedValue = mergeValues(a[key], b[key]);\n if (!sharedValue.valid) {\n return { valid: false };\n }\n newObj[key] = sharedValue.data;\n }\n return { valid: true, data: newObj };\n }\n else if (aType === util_1.ZodParsedType.array && bType === util_1.ZodParsedType.array) {\n if (a.length !== b.length) {\n return { valid: false };\n }\n const newArray = [];\n for (let index = 0; index < a.length; index++) {\n const itemA = a[index];\n const itemB = b[index];\n const sharedValue = mergeValues(itemA, itemB);\n if (!sharedValue.valid) {\n return { valid: false };\n }\n newArray.push(sharedValue.data);\n }\n return { valid: true, data: newArray };\n }\n else if (aType === util_1.ZodParsedType.date &&\n bType === util_1.ZodParsedType.date &&\n +a === +b) {\n return { valid: true, data: a };\n }\n else {\n return { valid: false };\n }\n}\nclass ZodIntersection extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n const handleParsed = (parsedLeft, parsedRight) => {\n if (parseUtil_1.isAborted(parsedLeft) || parseUtil_1.isAborted(parsedRight)) {\n return parseUtil_1.INVALID;\n }\n const merged = mergeValues(parsedLeft.value, parsedRight.value);\n if (!merged.valid) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_intersection_types,\n });\n return parseUtil_1.INVALID;\n }\n if (parseUtil_1.isDirty(parsedLeft) || parseUtil_1.isDirty(parsedRight)) {\n status.dirty();\n }\n return { status: status.value, value: merged.data };\n };\n if (ctx.common.async) {\n return Promise.all([\n this._def.left._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }),\n this._def.right._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }),\n ]).then(([left, right]) => handleParsed(left, right));\n }\n else {\n return handleParsed(this._def.left._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }), this._def.right._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }));\n }\n }\n}\nexports.ZodIntersection = 
ZodIntersection;\nZodIntersection.create = (left, right, params) => {\n return new ZodIntersection({\n left: left,\n right: right,\n typeName: ZodFirstPartyTypeKind.ZodIntersection,\n ...processCreateParams(params),\n });\n};\nclass ZodTuple extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.array) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.array,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n if (ctx.data.length < this._def.items.length) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: this._def.items.length,\n inclusive: true,\n type: \"array\",\n });\n return parseUtil_1.INVALID;\n }\n const rest = this._def.rest;\n if (!rest && ctx.data.length > this._def.items.length) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_big,\n maximum: this._def.items.length,\n inclusive: true,\n type: \"array\",\n });\n status.dirty();\n }\n const items = ctx.data\n .map((item, itemIndex) => {\n const schema = this._def.items[itemIndex] || this._def.rest;\n if (!schema)\n return null;\n return schema._parse(new ParseInputLazyPath(ctx, item, ctx.path, itemIndex));\n })\n .filter((x) => !!x); // filter nulls\n if (ctx.common.async) {\n return Promise.all(items).then((results) => {\n return parseUtil_1.ParseStatus.mergeArray(status, results);\n });\n }\n else {\n return parseUtil_1.ParseStatus.mergeArray(status, items);\n }\n }\n get items() {\n return this._def.items;\n }\n rest(rest) {\n return new ZodTuple({\n ...this._def,\n rest,\n });\n }\n}\nexports.ZodTuple = ZodTuple;\nZodTuple.create = (schemas, params) => {\n return new ZodTuple({\n items: schemas,\n typeName: ZodFirstPartyTypeKind.ZodTuple,\n rest: null,\n ...processCreateParams(params),\n });\n};\nclass ZodRecord extends ZodType {\n get keySchema() {\n return this._def.keyType;\n }\n get valueSchema() {\n return this._def.valueType;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.object) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.object,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const pairs = [];\n const keyType = this._def.keyType;\n const valueType = this._def.valueType;\n for (const key in ctx.data) {\n pairs.push({\n key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, key)),\n value: valueType._parse(new ParseInputLazyPath(ctx, ctx.data[key], ctx.path, key)),\n });\n }\n if (ctx.common.async) {\n return parseUtil_1.ParseStatus.mergeObjectAsync(status, pairs);\n }\n else {\n return parseUtil_1.ParseStatus.mergeObjectSync(status, pairs);\n }\n }\n get element() {\n return this._def.valueType;\n }\n static create(first, second, third) {\n if (second instanceof ZodType) {\n return new ZodRecord({\n keyType: first,\n valueType: second,\n typeName: ZodFirstPartyTypeKind.ZodRecord,\n ...processCreateParams(third),\n });\n }\n return new ZodRecord({\n keyType: ZodString.create(),\n valueType: first,\n typeName: ZodFirstPartyTypeKind.ZodRecord,\n ...processCreateParams(second),\n });\n }\n}\nexports.ZodRecord = ZodRecord;\nclass ZodMap extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.map) {\n 
parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.map,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const keyType = this._def.keyType;\n const valueType = this._def.valueType;\n const pairs = [...ctx.data.entries()].map(([key, value], index) => {\n return {\n key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, [index, \"key\"])),\n value: valueType._parse(new ParseInputLazyPath(ctx, value, ctx.path, [index, \"value\"])),\n };\n });\n if (ctx.common.async) {\n const finalMap = new Map();\n return Promise.resolve().then(async () => {\n for (const pair of pairs) {\n const key = await pair.key;\n const value = await pair.value;\n if (key.status === \"aborted\" || value.status === \"aborted\") {\n return parseUtil_1.INVALID;\n }\n if (key.status === \"dirty\" || value.status === \"dirty\") {\n status.dirty();\n }\n finalMap.set(key.value, value.value);\n }\n return { status: status.value, value: finalMap };\n });\n }\n else {\n const finalMap = new Map();\n for (const pair of pairs) {\n const key = pair.key;\n const value = pair.value;\n if (key.status === \"aborted\" || value.status === \"aborted\") {\n return parseUtil_1.INVALID;\n }\n if (key.status === \"dirty\" || value.status === \"dirty\") {\n status.dirty();\n }\n finalMap.set(key.value, value.value);\n }\n return { status: status.value, value: finalMap };\n }\n }\n}\nexports.ZodMap = ZodMap;\nZodMap.create = (keyType, valueType, params) => {\n return new ZodMap({\n valueType,\n keyType,\n typeName: ZodFirstPartyTypeKind.ZodMap,\n ...processCreateParams(params),\n });\n};\nclass ZodSet extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.set) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.set,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const def = this._def;\n if (def.minSize !== null) {\n if (ctx.data.size < def.minSize.value) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: def.minSize.value,\n type: \"set\",\n inclusive: true,\n message: def.minSize.message,\n });\n status.dirty();\n }\n }\n if (def.maxSize !== null) {\n if (ctx.data.size > def.maxSize.value) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_big,\n maximum: def.maxSize.value,\n type: \"set\",\n inclusive: true,\n message: def.maxSize.message,\n });\n status.dirty();\n }\n }\n const valueType = this._def.valueType;\n function finalizeSet(elements) {\n const parsedSet = new Set();\n for (const element of elements) {\n if (element.status === \"aborted\")\n return parseUtil_1.INVALID;\n if (element.status === \"dirty\")\n status.dirty();\n parsedSet.add(element.value);\n }\n return { status: status.value, value: parsedSet };\n }\n const elements = [...ctx.data.values()].map((item, i) => valueType._parse(new ParseInputLazyPath(ctx, item, ctx.path, i)));\n if (ctx.common.async) {\n return Promise.all(elements).then((elements) => finalizeSet(elements));\n }\n else {\n return finalizeSet(elements);\n }\n }\n min(minSize, message) {\n return new ZodSet({\n ...this._def,\n minSize: { value: minSize, message: errorUtil_1.errorUtil.toString(message) },\n });\n }\n max(maxSize, message) {\n return new ZodSet({\n ...this._def,\n maxSize: { value: maxSize, message: errorUtil_1.errorUtil.toString(message) },\n });\n }\n 
size(size, message) {\n return this.min(size, message).max(size, message);\n }\n nonempty(message) {\n return this.min(1, message);\n }\n}\nexports.ZodSet = ZodSet;\nZodSet.create = (valueType, params) => {\n return new ZodSet({\n valueType,\n minSize: null,\n maxSize: null,\n typeName: ZodFirstPartyTypeKind.ZodSet,\n ...processCreateParams(params),\n });\n};\nclass ZodFunction extends ZodType {\n constructor() {\n super(...arguments);\n this.validate = this.implement;\n }\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.function) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.function,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n function makeArgsIssue(args, error) {\n return parseUtil_1.makeIssue({\n data: args,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n ZodError_1.overrideErrorMap,\n ZodError_1.defaultErrorMap,\n ].filter((x) => !!x),\n issueData: {\n code: ZodError_1.ZodIssueCode.invalid_arguments,\n argumentsError: error,\n },\n });\n }\n function makeReturnsIssue(returns, error) {\n return parseUtil_1.makeIssue({\n data: returns,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n ZodError_1.overrideErrorMap,\n ZodError_1.defaultErrorMap,\n ].filter((x) => !!x),\n issueData: {\n code: ZodError_1.ZodIssueCode.invalid_return_type,\n returnTypeError: error,\n },\n });\n }\n const params = { errorMap: ctx.common.contextualErrorMap };\n const fn = ctx.data;\n if (this._def.returns instanceof ZodPromise) {\n return parseUtil_1.OK(async (...args) => {\n const error = new ZodError_1.ZodError([]);\n const parsedArgs = await this._def.args\n .parseAsync(args, params)\n .catch((e) => {\n error.addIssue(makeArgsIssue(args, e));\n throw error;\n });\n const result = await fn(...parsedArgs);\n const parsedReturns = await this._def.returns._def.type\n .parseAsync(result, params)\n .catch((e) => {\n error.addIssue(makeReturnsIssue(result, e));\n throw error;\n });\n return parsedReturns;\n });\n }\n else {\n return parseUtil_1.OK((...args) => {\n const parsedArgs = this._def.args.safeParse(args, params);\n if (!parsedArgs.success) {\n throw new ZodError_1.ZodError([makeArgsIssue(args, parsedArgs.error)]);\n }\n const result = fn(...parsedArgs.data);\n const parsedReturns = this._def.returns.safeParse(result, params);\n if (!parsedReturns.success) {\n throw new ZodError_1.ZodError([makeReturnsIssue(result, parsedReturns.error)]);\n }\n return parsedReturns.data;\n });\n }\n }\n parameters() {\n return this._def.args;\n }\n returnType() {\n return this._def.returns;\n }\n args(...items) {\n return new ZodFunction({\n ...this._def,\n args: ZodTuple.create(items).rest(ZodUnknown.create()),\n });\n }\n returns(returnType) {\n return new ZodFunction({\n ...this._def,\n returns: returnType,\n });\n }\n implement(func) {\n const validatedFunc = this.parse(func);\n return validatedFunc;\n }\n strictImplement(func) {\n const validatedFunc = this.parse(func);\n return validatedFunc;\n }\n}\nexports.ZodFunction = ZodFunction;\nZodFunction.create = (args, returns, params) => {\n return new ZodFunction({\n args: (args\n ? 
args.rest(ZodUnknown.create())\n : ZodTuple.create([]).rest(ZodUnknown.create())),\n returns: returns || ZodUnknown.create(),\n typeName: ZodFirstPartyTypeKind.ZodFunction,\n ...processCreateParams(params),\n });\n};\nclass ZodLazy extends ZodType {\n get schema() {\n return this._def.getter();\n }\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const lazySchema = this._def.getter();\n return lazySchema._parse({ data: ctx.data, path: ctx.path, parent: ctx });\n }\n}\nexports.ZodLazy = ZodLazy;\nZodLazy.create = (getter, params) => {\n return new ZodLazy({\n getter: getter,\n typeName: ZodFirstPartyTypeKind.ZodLazy,\n ...processCreateParams(params),\n });\n};\nclass ZodLiteral extends ZodType {\n _parse(input) {\n if (input.data !== this._def.value) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_literal,\n expected: this._def.value,\n });\n return parseUtil_1.INVALID;\n }\n return { status: \"valid\", value: input.data };\n }\n get value() {\n return this._def.value;\n }\n}\nexports.ZodLiteral = ZodLiteral;\nZodLiteral.create = (value, params) => {\n return new ZodLiteral({\n value: value,\n typeName: ZodFirstPartyTypeKind.ZodLiteral,\n ...processCreateParams(params),\n });\n};\nfunction createZodEnum(values, params) {\n return new ZodEnum({\n values: values,\n typeName: ZodFirstPartyTypeKind.ZodEnum,\n ...processCreateParams(params),\n });\n}\nclass ZodEnum extends ZodType {\n _parse(input) {\n if (typeof input.data !== \"string\") {\n const ctx = this._getOrReturnCtx(input);\n const expectedValues = this._def.values;\n parseUtil_1.addIssueToContext(ctx, {\n expected: util_1.util.joinValues(expectedValues),\n received: ctx.parsedType,\n code: ZodError_1.ZodIssueCode.invalid_type,\n });\n return parseUtil_1.INVALID;\n }\n if (this._def.values.indexOf(input.data) === -1) {\n const ctx = this._getOrReturnCtx(input);\n const expectedValues = this._def.values;\n parseUtil_1.addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodError_1.ZodIssueCode.invalid_enum_value,\n options: expectedValues,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n get options() {\n return this._def.values;\n }\n get enum() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n get Values() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n get Enum() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n}\nexports.ZodEnum = ZodEnum;\nZodEnum.create = createZodEnum;\nclass ZodNativeEnum extends ZodType {\n _parse(input) {\n const nativeEnumValues = util_1.util.getValidEnumValues(this._def.values);\n const ctx = this._getOrReturnCtx(input);\n if (ctx.parsedType !== util_1.ZodParsedType.string &&\n ctx.parsedType !== util_1.ZodParsedType.number) {\n const expectedValues = util_1.util.objectValues(nativeEnumValues);\n parseUtil_1.addIssueToContext(ctx, {\n expected: util_1.util.joinValues(expectedValues),\n received: ctx.parsedType,\n code: ZodError_1.ZodIssueCode.invalid_type,\n });\n return parseUtil_1.INVALID;\n }\n if (nativeEnumValues.indexOf(input.data) === -1) {\n const expectedValues = util_1.util.objectValues(nativeEnumValues);\n parseUtil_1.addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodError_1.ZodIssueCode.invalid_enum_value,\n options: expectedValues,\n });\n 
return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n get enum() {\n return this._def.values;\n }\n}\nexports.ZodNativeEnum = ZodNativeEnum;\nZodNativeEnum.create = (values, params) => {\n return new ZodNativeEnum({\n values: values,\n typeName: ZodFirstPartyTypeKind.ZodNativeEnum,\n ...processCreateParams(params),\n });\n};\nclass ZodPromise extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.promise &&\n ctx.common.async === false) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.promise,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const promisified = ctx.parsedType === util_1.ZodParsedType.promise\n ? ctx.data\n : Promise.resolve(ctx.data);\n return parseUtil_1.OK(promisified.then((data) => {\n return this._def.type.parseAsync(data, {\n path: ctx.path,\n errorMap: ctx.common.contextualErrorMap,\n });\n }));\n }\n}\nexports.ZodPromise = ZodPromise;\nZodPromise.create = (schema, params) => {\n return new ZodPromise({\n type: schema,\n typeName: ZodFirstPartyTypeKind.ZodPromise,\n ...processCreateParams(params),\n });\n};\nclass ZodEffects extends ZodType {\n innerType() {\n return this._def.schema;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n const effect = this._def.effect || null;\n if (effect.type === \"preprocess\") {\n const processed = effect.transform(ctx.data);\n if (ctx.common.async) {\n return Promise.resolve(processed).then((processed) => {\n return this._def.schema._parseAsync({\n data: processed,\n path: ctx.path,\n parent: ctx,\n });\n });\n }\n else {\n return this._def.schema._parseSync({\n data: processed,\n path: ctx.path,\n parent: ctx,\n });\n }\n }\n const checkCtx = {\n addIssue: (arg) => {\n parseUtil_1.addIssueToContext(ctx, arg);\n if (arg.fatal) {\n status.abort();\n }\n else {\n status.dirty();\n }\n },\n get path() {\n return ctx.path;\n },\n };\n checkCtx.addIssue = checkCtx.addIssue.bind(checkCtx);\n if (effect.type === \"refinement\") {\n const executeRefinement = (acc\n // effect: RefinementEffect\n ) => {\n const result = effect.refinement(acc, checkCtx);\n if (ctx.common.async) {\n return Promise.resolve(result);\n }\n if (result instanceof Promise) {\n throw new Error(\"Async refinement encountered during synchronous parse operation. 
Use .parseAsync instead.\");\n }\n return acc;\n };\n if (ctx.common.async === false) {\n const inner = this._def.schema._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n if (inner.status === \"aborted\")\n return parseUtil_1.INVALID;\n if (inner.status === \"dirty\")\n status.dirty();\n // return value is ignored\n executeRefinement(inner.value);\n return { status: status.value, value: inner.value };\n }\n else {\n return this._def.schema\n ._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx })\n .then((inner) => {\n if (inner.status === \"aborted\")\n return parseUtil_1.INVALID;\n if (inner.status === \"dirty\")\n status.dirty();\n return executeRefinement(inner.value).then(() => {\n return { status: status.value, value: inner.value };\n });\n });\n }\n }\n if (effect.type === \"transform\") {\n if (ctx.common.async === false) {\n const base = this._def.schema._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n // if (base.status === \"aborted\") return INVALID;\n // if (base.status === \"dirty\") {\n // return { status: \"dirty\", value: base.value };\n // }\n if (!parseUtil_1.isValid(base))\n return base;\n const result = effect.transform(base.value, checkCtx);\n if (result instanceof Promise) {\n throw new Error(`Asynchronous transform encountered during synchronous parse operation. Use .parseAsync instead.`);\n }\n return { status: status.value, value: result };\n }\n else {\n return this._def.schema\n ._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx })\n .then((base) => {\n if (!parseUtil_1.isValid(base))\n return base;\n // if (base.status === \"aborted\") return INVALID;\n // if (base.status === \"dirty\") {\n // return { status: \"dirty\", value: base.value };\n // }\n return Promise.resolve(effect.transform(base.value, checkCtx)).then((result) => ({ status: status.value, value: result }));\n });\n }\n }\n util_1.util.assertNever(effect);\n }\n}\nexports.ZodEffects = ZodEffects;\nexports.ZodTransformer = ZodEffects;\nZodEffects.create = (schema, effect, params) => {\n return new ZodEffects({\n schema,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect,\n ...processCreateParams(params),\n });\n};\nZodEffects.createWithPreprocess = (preprocess, schema, params) => {\n return new ZodEffects({\n schema,\n effect: { type: \"preprocess\", transform: preprocess },\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n ...processCreateParams(params),\n });\n};\nclass ZodOptional extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType === util_1.ZodParsedType.undefined) {\n return parseUtil_1.OK(undefined);\n }\n return this._def.innerType._parse(input);\n }\n unwrap() {\n return this._def.innerType;\n }\n}\nexports.ZodOptional = ZodOptional;\nZodOptional.create = (type, params) => {\n return new ZodOptional({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodOptional,\n ...processCreateParams(params),\n });\n};\nclass ZodNullable extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType === util_1.ZodParsedType.null) {\n return parseUtil_1.OK(null);\n }\n return this._def.innerType._parse(input);\n }\n unwrap() {\n return this._def.innerType;\n }\n}\nexports.ZodNullable = ZodNullable;\nZodNullable.create = (type, params) => {\n return new ZodNullable({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodNullable,\n ...processCreateParams(params),\n });\n};\nclass ZodDefault extends ZodType {\n _parse(input) {\n const { ctx } = 
this._processInputParams(input);\n let data = ctx.data;\n if (ctx.parsedType === util_1.ZodParsedType.undefined) {\n data = this._def.defaultValue();\n }\n return this._def.innerType._parse({\n data,\n path: ctx.path,\n parent: ctx,\n });\n }\n removeDefault() {\n return this._def.innerType;\n }\n}\nexports.ZodDefault = ZodDefault;\nZodDefault.create = (type, params) => {\n return new ZodOptional({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodOptional,\n ...processCreateParams(params),\n });\n};\nclass ZodNaN extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.nan) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.nan,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return { status: \"valid\", value: input.data };\n }\n}\nexports.ZodNaN = ZodNaN;\nZodNaN.create = (params) => {\n return new ZodNaN({\n typeName: ZodFirstPartyTypeKind.ZodNaN,\n ...processCreateParams(params),\n });\n};\nconst custom = (check, params = {}, fatal) => {\n if (check)\n return ZodAny.create().superRefine((data, ctx) => {\n if (!check(data)) {\n const p = typeof params === \"function\" ? params(data) : params;\n const p2 = typeof p === \"string\" ? { message: p } : p;\n ctx.addIssue({ code: \"custom\", ...p2, fatal });\n }\n });\n return ZodAny.create();\n};\nexports.custom = custom;\nexports.late = {\n object: ZodObject.lazycreate,\n};\nvar ZodFirstPartyTypeKind;\n(function (ZodFirstPartyTypeKind) {\n ZodFirstPartyTypeKind[\"ZodString\"] = \"ZodString\";\n ZodFirstPartyTypeKind[\"ZodNumber\"] = \"ZodNumber\";\n ZodFirstPartyTypeKind[\"ZodNaN\"] = \"ZodNaN\";\n ZodFirstPartyTypeKind[\"ZodBigInt\"] = \"ZodBigInt\";\n ZodFirstPartyTypeKind[\"ZodBoolean\"] = \"ZodBoolean\";\n ZodFirstPartyTypeKind[\"ZodDate\"] = \"ZodDate\";\n ZodFirstPartyTypeKind[\"ZodUndefined\"] = \"ZodUndefined\";\n ZodFirstPartyTypeKind[\"ZodNull\"] = \"ZodNull\";\n ZodFirstPartyTypeKind[\"ZodAny\"] = \"ZodAny\";\n ZodFirstPartyTypeKind[\"ZodUnknown\"] = \"ZodUnknown\";\n ZodFirstPartyTypeKind[\"ZodNever\"] = \"ZodNever\";\n ZodFirstPartyTypeKind[\"ZodVoid\"] = \"ZodVoid\";\n ZodFirstPartyTypeKind[\"ZodArray\"] = \"ZodArray\";\n ZodFirstPartyTypeKind[\"ZodObject\"] = \"ZodObject\";\n ZodFirstPartyTypeKind[\"ZodUnion\"] = \"ZodUnion\";\n ZodFirstPartyTypeKind[\"ZodDiscriminatedUnion\"] = \"ZodDiscriminatedUnion\";\n ZodFirstPartyTypeKind[\"ZodIntersection\"] = \"ZodIntersection\";\n ZodFirstPartyTypeKind[\"ZodTuple\"] = \"ZodTuple\";\n ZodFirstPartyTypeKind[\"ZodRecord\"] = \"ZodRecord\";\n ZodFirstPartyTypeKind[\"ZodMap\"] = \"ZodMap\";\n ZodFirstPartyTypeKind[\"ZodSet\"] = \"ZodSet\";\n ZodFirstPartyTypeKind[\"ZodFunction\"] = \"ZodFunction\";\n ZodFirstPartyTypeKind[\"ZodLazy\"] = \"ZodLazy\";\n ZodFirstPartyTypeKind[\"ZodLiteral\"] = \"ZodLiteral\";\n ZodFirstPartyTypeKind[\"ZodEnum\"] = \"ZodEnum\";\n ZodFirstPartyTypeKind[\"ZodEffects\"] = \"ZodEffects\";\n ZodFirstPartyTypeKind[\"ZodNativeEnum\"] = \"ZodNativeEnum\";\n ZodFirstPartyTypeKind[\"ZodOptional\"] = \"ZodOptional\";\n ZodFirstPartyTypeKind[\"ZodNullable\"] = \"ZodNullable\";\n ZodFirstPartyTypeKind[\"ZodDefault\"] = \"ZodDefault\";\n ZodFirstPartyTypeKind[\"ZodPromise\"] = \"ZodPromise\";\n})(ZodFirstPartyTypeKind = exports.ZodFirstPartyTypeKind || (exports.ZodFirstPartyTypeKind = {}));\nconst instanceOfType = (cls, params = {\n message: `Input not instance of 
${cls.name}`,\n}) => exports.custom((data) => data instanceof cls, params, true);\nexports.instanceof = instanceOfType;\nconst stringType = ZodString.create;\nexports.string = stringType;\nconst numberType = ZodNumber.create;\nexports.number = numberType;\nconst nanType = ZodNaN.create;\nexports.nan = nanType;\nconst bigIntType = ZodBigInt.create;\nexports.bigint = bigIntType;\nconst booleanType = ZodBoolean.create;\nexports.boolean = booleanType;\nconst dateType = ZodDate.create;\nexports.date = dateType;\nconst undefinedType = ZodUndefined.create;\nexports.undefined = undefinedType;\nconst nullType = ZodNull.create;\nexports.null = nullType;\nconst anyType = ZodAny.create;\nexports.any = anyType;\nconst unknownType = ZodUnknown.create;\nexports.unknown = unknownType;\nconst neverType = ZodNever.create;\nexports.never = neverType;\nconst voidType = ZodVoid.create;\nexports.void = voidType;\nconst arrayType = ZodArray.create;\nexports.array = arrayType;\nconst objectType = ZodObject.create;\nexports.object = objectType;\nconst strictObjectType = ZodObject.strictCreate;\nexports.strictObject = strictObjectType;\nconst unionType = ZodUnion.create;\nexports.union = unionType;\nconst discriminatedUnionType = ZodDiscriminatedUnion.create;\nexports.discriminatedUnion = discriminatedUnionType;\nconst intersectionType = ZodIntersection.create;\nexports.intersection = intersectionType;\nconst tupleType = ZodTuple.create;\nexports.tuple = tupleType;\nconst recordType = ZodRecord.create;\nexports.record = recordType;\nconst mapType = ZodMap.create;\nexports.map = mapType;\nconst setType = ZodSet.create;\nexports.set = setType;\nconst functionType = ZodFunction.create;\nexports.function = functionType;\nconst lazyType = ZodLazy.create;\nexports.lazy = lazyType;\nconst literalType = ZodLiteral.create;\nexports.literal = literalType;\nconst enumType = ZodEnum.create;\nexports.enum = enumType;\nconst nativeEnumType = ZodNativeEnum.create;\nexports.nativeEnum = nativeEnumType;\nconst promiseType = ZodPromise.create;\nexports.promise = promiseType;\nconst effectsType = ZodEffects.create;\nexports.effect = effectsType;\nexports.transformer = effectsType;\nconst optionalType = ZodOptional.create;\nexports.optional = optionalType;\nconst nullableType = ZodNullable.create;\nexports.nullable = nullableType;\nconst preprocessType = ZodEffects.createWithPreprocess;\nexports.preprocess = preprocessType;\nconst ostring = () => stringType().optional();\nexports.ostring = ostring;\nconst onumber = () => numberType().optional();\nexports.onumber = onumber;\nconst oboolean = () => booleanType().optional();\nexports.oboolean = oboolean;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.readConfigFile = exports.CONFIG_FILEPATH = void 0;\nconst fs = __importStar(require(\"fs\"));\nconst yaml_1 = __importDefault(require(\"yaml\"));\nconst schemas_1 = require(\"./schemas\");\nconst path_1 = __importDefault(require(\"path\"));\nexports.CONFIG_FILEPATH = './.github/dependency-review.yml';\nfunction readConfigFile(filePath = exports.CONFIG_FILEPATH) {\n // By default we want to fail on all severities and allow all licenses.\n const defaultOptions = {\n fail_on_severity: 'low'\n };\n let data;\n try {\n data = fs.readFileSync(path_1.default.resolve(filePath), 'utf-8');\n }\n catch (error) {\n if (error.code && error.code === 'ENOENT') {\n return defaultOptions;\n }\n else {\n throw error;\n }\n }\n return schemas_1.ConfigurationOptionsSchema.parse(yaml_1.default.parse(data));\n}\nexports.readConfigFile = readConfigFile;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.filterChangesBySeverity = void 0;\nconst schemas_1 = require(\"./schemas\");\nfunction filterChangesBySeverity(severity, changes) {\n const severityIdx = schemas_1.SEVERITIES.indexOf(severity);\n let filteredChanges = [];\n for (let change of changes) {\n if (change === undefined ||\n change.vulnerabilities === undefined ||\n change.vulnerabilities.length === 0) {\n continue;\n }\n let fChange = Object.assign(Object.assign({}, change), { vulnerabilities: change.vulnerabilities.filter(vuln => {\n const vulnIdx = schemas_1.SEVERITIES.indexOf(vuln.severity);\n if (vulnIdx <= severityIdx) {\n return true;\n }\n }) });\n filteredChanges.push(fChange);\n }\n // don't want to deal with changes with no vulnerabilities\n filteredChanges = filteredChanges.filter(change => change.vulnerabilities.length > 0);\n return filteredChanges;\n}\nexports.filterChangesBySeverity = filterChangesBySeverity;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChangesSchema = exports.ConfigurationOptionsSchema = exports.PullRequestSchema = exports.ChangeSchema = exports.SEVERITIES = void 0;\nconst z = __importStar(require(\"zod\"));\nexports.SEVERITIES = ['critical', 'high', 'moderate', 'low'];\nexports.ChangeSchema = z.object({\n change_type: z.enum(['added', 'removed']),\n manifest: z.string(),\n ecosystem: z.string(),\n name: z.string(),\n version: z.string(),\n package_url: z.string(),\n license: z.string().nullable(),\n source_repository_url: z.string().nullable(),\n vulnerabilities: z\n .array(z.object({\n severity: z.enum(['critical', 'high', 'moderate', 'low']),\n advisory_ghsa_id: z.string(),\n advisory_summary: z.string(),\n advisory_url: z.string()\n }))\n .optional()\n .default([])\n});\nexports.PullRequestSchema = z.object({\n number: z.number(),\n base: z.object({ sha: z.string() }),\n head: z.object({ sha: z.string() })\n});\nexports.ConfigurationOptionsSchema = z\n .object({\n fail_on_severity: z.enum(exports.SEVERITIES).default('low'),\n allow_licenses: z.array(z.string()).default([]),\n deny_licenses: z.array(z.string()).default([])\n})\n .partial()\n .refine(obj => !(obj.allow_licenses && obj.deny_licenses), \"Can't specify both allow_licenses and deny_licenses\");\nexports.ChangesSchema = z.array(exports.ChangeSchema);\n",null,"module.exports = require(\"assert\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"punycode\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"zlib\");","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockMap = require('./resolve-block-map.js');\nvar resolveBlockSeq = require('./resolve-block-seq.js');\nvar resolveFlowCollection = require('./resolve-flow-collection.js');\n\nfunction composeCollection(CN, ctx, token, tagToken, onError) {\n let coll;\n switch (token.type) {\n case 'block-map': {\n coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError);\n break;\n }\n case 'block-seq': {\n coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError);\n break;\n }\n case 'flow-collection': {\n coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError);\n break;\n }\n }\n if (!tagToken)\n return coll;\n const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));\n if (!tagName)\n return coll;\n // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841\n const Coll = coll.constructor;\n if (tagName === '!' || tagName === Coll.tagName) {\n coll.tag = Coll.tagName;\n return coll;\n }\n const expType = Node.isMap(coll) ? 
'map' : 'seq';\n let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);\n if (!tag) {\n const kt = ctx.schema.knownTags[tagName];\n if (kt && kt.collection === expType) {\n ctx.schema.tags.push(Object.assign({}, kt, { default: false }));\n tag = kt;\n }\n else {\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);\n coll.tag = tagName;\n return coll;\n }\n }\n const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n const node = Node.isNode(res)\n ? res\n : new Scalar.Scalar(res);\n node.range = coll.range;\n node.tag = tagName;\n if (tag?.format)\n node.format = tag.format;\n return node;\n}\n\nexports.composeCollection = composeCollection;\n","'use strict';\n\nvar Document = require('../doc/Document.js');\nvar composeNode = require('./compose-node.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\n\nfunction composeDoc(options, directives, { offset, start, value, end }, onError) {\n const opts = Object.assign({ _directives: directives }, options);\n const doc = new Document.Document(undefined, opts);\n const ctx = {\n atRoot: true,\n directives: doc.directives,\n options: doc.options,\n schema: doc.schema\n };\n const props = resolveProps.resolveProps(start, {\n indicator: 'doc-start',\n next: value ?? end?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n if (props.found) {\n doc.directives.docStart = true;\n if (value &&\n (value.type === 'block-map' || value.type === 'block-seq') &&\n !props.hasNewline)\n onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');\n }\n doc.contents = value\n ? composeNode.composeNode(ctx, value, props, onError)\n : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);\n const contentEnd = doc.contents.range[2];\n const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);\n if (re.comment)\n doc.comment = re.comment;\n doc.range = [offset, contentEnd, re.offset];\n return doc;\n}\n\nexports.composeDoc = composeDoc;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar composeCollection = require('./compose-collection.js');\nvar composeScalar = require('./compose-scalar.js');\nvar resolveEnd = require('./resolve-end.js');\nvar utilEmptyScalarPosition = require('./util-empty-scalar-position.js');\n\nconst CN = { composeNode, composeEmptyNode };\nfunction composeNode(ctx, token, props, onError) {\n const { spaceBefore, comment, anchor, tag } = props;\n let node;\n let isSrcToken = true;\n switch (token.type) {\n case 'alias':\n node = composeAlias(ctx, token, onError);\n if (anchor || tag)\n onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');\n break;\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'block-scalar':\n node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n case 'block-map':\n case 'block-seq':\n case 'flow-collection':\n node = composeCollection.composeCollection(CN, ctx, token, tag, onError);\n if (anchor)\n node.anchor = anchor.source.substring(1);\n break;\n default: {\n const message = token.type === 'error'\n ? 
token.message\n : `Unsupported token (type: ${token.type})`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);\n isSrcToken = false;\n }\n }\n if (anchor && node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment) {\n if (token.type === 'scalar' && token.source === '')\n node.comment = comment;\n else\n node.commentBefore = comment;\n }\n // @ts-expect-error Type checking misses meaning of isSrcToken\n if (ctx.options.keepSourceTokens && isSrcToken)\n node.srcToken = token;\n return node;\n}\nfunction composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) {\n const token = {\n type: 'scalar',\n offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),\n indent: -1,\n source: ''\n };\n const node = composeScalar.composeScalar(ctx, token, tag, onError);\n if (anchor) {\n node.anchor = anchor.source.substring(1);\n if (node.anchor === '')\n onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n }\n if (spaceBefore)\n node.spaceBefore = true;\n if (comment)\n node.comment = comment;\n return node;\n}\nfunction composeAlias({ options }, { offset, source, end }, onError) {\n const alias = new Alias.Alias(source.substring(1));\n if (alias.source === '')\n onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');\n if (alias.source.endsWith(':'))\n onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);\n alias.range = [offset, valueEnd, re.offset];\n if (re.comment)\n alias.comment = re.comment;\n return alias;\n}\n\nexports.composeEmptyNode = composeEmptyNode;\nexports.composeNode = composeNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockScalar = require('./resolve-block-scalar.js');\nvar resolveFlowScalar = require('./resolve-flow-scalar.js');\n\nfunction composeScalar(ctx, token, tagToken, onError) {\n const { value, type, comment, range } = token.type === 'block-scalar'\n ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)\n : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);\n const tagName = tagToken\n ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))\n : null;\n const tag = tagToken && tagName\n ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)\n : token.type === 'scalar'\n ? findScalarTagByTest(ctx, value, token, onError)\n : ctx.schema[Node.SCALAR];\n let scalar;\n try {\n const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res);\n }\n catch (error) {\n const msg = error instanceof Error ? error.message : String(error);\n onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg);\n scalar = new Scalar.Scalar(value);\n }\n scalar.range = range;\n scalar.source = value;\n if (type)\n scalar.type = type;\n if (tagName)\n scalar.tag = tagName;\n if (tag.format)\n scalar.format = tag.format;\n if (comment)\n scalar.comment = comment;\n return scalar;\n}\nfunction findScalarTagByName(schema, value, tagName, tagToken, onError) {\n if (tagName === '!')\n return schema[Node.SCALAR]; // non-specific tag\n const matchWithTest = [];\n for (const tag of schema.tags) {\n if (!tag.collection && tag.tag === tagName) {\n if (tag.default && tag.test)\n matchWithTest.push(tag);\n else\n return tag;\n }\n }\n for (const tag of matchWithTest)\n if (tag.test?.test(value))\n return tag;\n const kt = schema.knownTags[tagName];\n if (kt && !kt.collection) {\n // Ensure that the known tag is available for stringifying,\n // but does not get used by default.\n schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));\n return kt;\n }\n onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');\n return schema[Node.SCALAR];\n}\nfunction findScalarTagByTest({ directives, schema }, value, token, onError) {\n const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[Node.SCALAR];\n if (schema.compat) {\n const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??\n schema[Node.SCALAR];\n if (tag.tag !== compat.tag) {\n const ts = directives.tagString(tag.tag);\n const cs = directives.tagString(compat.tag);\n const msg = `Value may be parsed as either ${ts} or ${cs}`;\n onError(token, 'TAG_RESOLVE_FAILED', msg, true);\n }\n }\n return tag;\n}\n\nexports.composeScalar = composeScalar;\n","'use strict';\n\nvar directives = require('../doc/directives.js');\nvar Document = require('../doc/Document.js');\nvar errors = require('../errors.js');\nvar Node = require('../nodes/Node.js');\nvar composeDoc = require('./compose-doc.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction getErrorPos(src) {\n if (typeof src === 'number')\n return [src, src + 1];\n if (Array.isArray(src))\n return src.length === 2 ? src : [src[0], src[1]];\n const { offset, source } = src;\n return [offset, offset + (typeof source === 'string' ? source.length : 1)];\n}\nfunction parsePrelude(prelude) {\n let comment = '';\n let atComment = false;\n let afterEmptyLine = false;\n for (let i = 0; i < prelude.length; ++i) {\n const source = prelude[i];\n switch (source[0]) {\n case '#':\n comment +=\n (comment === '' ? '' : afterEmptyLine ? 
'\\n\\n' : '\\n') +\n (source.substring(1) || ' ');\n atComment = true;\n afterEmptyLine = false;\n break;\n case '%':\n if (prelude[i + 1]?.[0] !== '#')\n i += 1;\n atComment = false;\n break;\n default:\n // This may be wrong after doc-end, but in that case it doesn't matter\n if (!atComment)\n afterEmptyLine = true;\n atComment = false;\n }\n }\n return { comment, afterEmptyLine };\n}\n/**\n * Compose a stream of CST nodes into a stream of YAML Documents.\n *\n * ```ts\n * import { Composer, Parser } from 'yaml'\n *\n * const src: string = ...\n * const tokens = new Parser().parse(src)\n * const docs = new Composer().compose(tokens)\n * ```\n */\nclass Composer {\n constructor(options = {}) {\n this.doc = null;\n this.atDirectives = false;\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n this.onError = (source, code, message, warning) => {\n const pos = getErrorPos(source);\n if (warning)\n this.warnings.push(new errors.YAMLWarning(pos, code, message));\n else\n this.errors.push(new errors.YAMLParseError(pos, code, message));\n };\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n this.directives = new directives.Directives({ version: options.version || '1.2' });\n this.options = options;\n }\n decorate(doc, afterDoc) {\n const { comment, afterEmptyLine } = parsePrelude(this.prelude);\n //console.log({ dc: doc.comment, prelude, comment })\n if (comment) {\n const dc = doc.contents;\n if (afterDoc) {\n doc.comment = doc.comment ? `${doc.comment}\\n${comment}` : comment;\n }\n else if (afterEmptyLine || doc.directives.docStart || !dc) {\n doc.commentBefore = comment;\n }\n else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) {\n let it = dc.items[0];\n if (Node.isPair(it))\n it = it.key;\n const cb = it.commentBefore;\n it.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n else {\n const cb = dc.commentBefore;\n dc.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n }\n }\n if (afterDoc) {\n Array.prototype.push.apply(doc.errors, this.errors);\n Array.prototype.push.apply(doc.warnings, this.warnings);\n }\n else {\n doc.errors = this.errors;\n doc.warnings = this.warnings;\n }\n this.prelude = [];\n this.errors = [];\n this.warnings = [];\n }\n /**\n * Current stream status information.\n *\n * Mostly useful at the end of input for an empty stream.\n */\n streamInfo() {\n return {\n comment: parsePrelude(this.prelude).comment,\n directives: this.directives,\n errors: this.errors,\n warnings: this.warnings\n };\n }\n /**\n * Compose tokens into documents.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *compose(tokens, forceDoc = false, endOffset = -1) {\n for (const token of tokens)\n yield* this.next(token);\n yield* this.end(forceDoc, endOffset);\n }\n /** Advance the composer by one CST token. 
*/\n *next(token) {\n if (process.env.LOG_STREAM)\n console.dir(token, { depth: null });\n switch (token.type) {\n case 'directive':\n this.directives.add(token.source, (offset, message, warning) => {\n const pos = getErrorPos(token);\n pos[0] += offset;\n this.onError(pos, 'BAD_DIRECTIVE', message, warning);\n });\n this.prelude.push(token.source);\n this.atDirectives = true;\n break;\n case 'document': {\n const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);\n if (this.atDirectives && !doc.directives.docStart)\n this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');\n this.decorate(doc, false);\n if (this.doc)\n yield this.doc;\n this.doc = doc;\n this.atDirectives = false;\n break;\n }\n case 'byte-order-mark':\n case 'space':\n break;\n case 'comment':\n case 'newline':\n this.prelude.push(token.source);\n break;\n case 'error': {\n const msg = token.source\n ? `${token.message}: ${JSON.stringify(token.source)}`\n : token.message;\n const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);\n if (this.atDirectives || !this.doc)\n this.errors.push(error);\n else\n this.doc.errors.push(error);\n break;\n }\n case 'doc-end': {\n if (!this.doc) {\n const msg = 'Unexpected doc-end without preceding document';\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));\n break;\n }\n this.doc.directives.docEnd = true;\n const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);\n this.decorate(this.doc, true);\n if (end.comment) {\n const dc = this.doc.comment;\n this.doc.comment = dc ? `${dc}\\n${end.comment}` : end.comment;\n }\n this.doc.range[2] = end.offset;\n break;\n }\n default:\n this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));\n }\n }\n /**\n * Call at end of input to yield any remaining document.\n *\n * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n */\n *end(forceDoc = false, endOffset = -1) {\n if (this.doc) {\n this.decorate(this.doc, true);\n yield this.doc;\n this.doc = null;\n }\n else if (forceDoc) {\n const opts = Object.assign({ _directives: this.directives }, this.options);\n const doc = new Document.Document(undefined, opts);\n if (this.atDirectives)\n this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');\n doc.range = [0, endOffset, endOffset];\n this.decorate(doc, false);\n yield doc;\n }\n }\n}\n\nexports.Composer = Composer;\n","'use strict';\n\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst startColMsg = 'All mapping items must start at the same column';\nfunction resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bm.offset;\n for (const collItem of bm.items) {\n const { start, key, sep, value } = collItem;\n // key properties\n 
const keyProps = resolveProps.resolveProps(start, {\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: true\n });\n const implicitKey = !keyProps.found;\n if (implicitKey) {\n if (key) {\n if (key.type === 'block-seq')\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');\n else if ('indent' in key && key.indent !== bm.indent)\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n if (!keyProps.anchor && !keyProps.tag && !sep) {\n // TODO: assert being at last item?\n if (keyProps.comment) {\n if (map.comment)\n map.comment += '\\n' + keyProps.comment;\n else\n map.comment = keyProps.comment;\n }\n continue;\n }\n if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) {\n onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');\n }\n }\n else if (keyProps.found?.indent !== bm.indent) {\n onError(offset, 'BAD_INDENT', startColMsg);\n }\n // key value\n const keyStart = keyProps.end;\n const keyNode = key\n ? composeNode(ctx, key, keyProps, onError)\n : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? [], {\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: !key || key.type === 'block-scalar'\n });\n offset = valueProps.end;\n if (valueProps.found) {\n if (implicitKey) {\n if (value?.type === 'block-map' && !valueProps.hasNewline)\n onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');\n if (ctx.options.strict &&\n keyProps.start < valueProps.found.offset - 1024)\n onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);\n offset = valueNode.range[2];\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n else {\n // key with no value\n if (implicitKey)\n onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');\n if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n map.items.push(pair);\n }\n }\n map.range = [bm.offset, offset, offset];\n return map;\n}\n\nexports.resolveBlockMap = resolveBlockMap;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\n\nfunction resolveBlockScalar(scalar, strict, onError) {\n const start = scalar.offset;\n const header = parseBlockScalarHeader(scalar, strict, onError);\n if (!header)\n return { value: '', type: null, comment: '', range: [start, start, start] };\n const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;\n const lines = scalar.source ? 
splitLines(scalar.source) : [];\n // determine the end of content & start of chomping\n let chompStart = lines.length;\n for (let i = lines.length - 1; i >= 0; --i) {\n const content = lines[i][1];\n if (content === '' || content === '\\r')\n chompStart = i;\n else\n break;\n }\n // shortcut for empty contents\n if (chompStart === 0) {\n const value = header.chomp === '+' && lines.length > 0\n ? '\\n'.repeat(Math.max(1, lines.length - 1))\n : '';\n let end = start + header.length;\n if (scalar.source)\n end += scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n }\n // find the indentation level to trim from start\n let trimIndent = scalar.indent + header.indent;\n let offset = scalar.offset + header.length;\n let contentStart = 0;\n for (let i = 0; i < chompStart; ++i) {\n const [indent, content] = lines[i];\n if (content === '' || content === '\\r') {\n if (header.indent === 0 && indent.length > trimIndent)\n trimIndent = indent.length;\n }\n else {\n if (indent.length < trimIndent) {\n const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';\n onError(offset + indent.length, 'MISSING_CHAR', message);\n }\n if (header.indent === 0)\n trimIndent = indent.length;\n contentStart = i;\n break;\n }\n offset += indent.length + content.length + 1;\n }\n // include trailing more-indented empty lines in content\n for (let i = lines.length - 1; i >= chompStart; --i) {\n if (lines[i][0].length > trimIndent)\n chompStart = i + 1;\n }\n let value = '';\n let sep = '';\n let prevMoreIndented = false;\n // leading whitespace is kept intact\n for (let i = 0; i < contentStart; ++i)\n value += lines[i][0].slice(trimIndent) + '\\n';\n for (let i = contentStart; i < chompStart; ++i) {\n let [indent, content] = lines[i];\n offset += indent.length + content.length + 1;\n const crlf = content[content.length - 1] === '\\r';\n if (crlf)\n content = content.slice(0, -1);\n /* istanbul ignore if already caught in lexer */\n if (content && indent.length < trimIndent) {\n const src = header.indent\n ? 'explicit indentation indicator'\n : 'first line';\n const message = `Block scalar lines must not be less indented than their ${src}`;\n onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message);\n indent = '';\n }\n if (type === Scalar.Scalar.BLOCK_LITERAL) {\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n }\n else if (indent.length > trimIndent || content[0] === '\\t') {\n // more-indented content within a folded block\n if (sep === ' ')\n sep = '\\n';\n else if (!prevMoreIndented && sep === '\\n')\n sep = '\\n\\n';\n value += sep + indent.slice(trimIndent) + content;\n sep = '\\n';\n prevMoreIndented = true;\n }\n else if (content === '') {\n // empty line\n if (sep === '\\n')\n value += '\\n';\n else\n sep = '\\n';\n }\n else {\n value += sep + content;\n sep = ' ';\n prevMoreIndented = false;\n }\n }\n switch (header.chomp) {\n case '-':\n break;\n case '+':\n for (let i = chompStart; i < lines.length; ++i)\n value += '\\n' + lines[i][0].slice(trimIndent);\n if (value[value.length - 1] !== '\\n')\n value += '\\n';\n break;\n default:\n value += '\\n';\n }\n const end = start + header.length + scalar.source.length;\n return { value, type, comment: header.comment, range: [start, end, end] };\n}\nfunction parseBlockScalarHeader({ offset, props }, strict, onError) {\n /* istanbul ignore if should not happen */\n if (props[0].type !== 'block-scalar-header') {\n onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');\n return null;\n }\n const { source } = props[0];\n const mode = source[0];\n let indent = 0;\n let chomp = '';\n let error = -1;\n for (let i = 1; i < source.length; ++i) {\n const ch = source[i];\n if (!chomp && (ch === '-' || ch === '+'))\n chomp = ch;\n else {\n const n = Number(ch);\n if (!indent && n)\n indent = n;\n else if (error === -1)\n error = offset + i;\n }\n }\n if (error !== -1)\n onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);\n let hasSpace = false;\n let comment = '';\n let length = source.length;\n for (let i = 1; i < props.length; ++i) {\n const token = props[i];\n switch (token.type) {\n case 'space':\n hasSpace = true;\n // fallthrough\n case 'newline':\n length += token.source.length;\n break;\n case 'comment':\n if (strict && !hasSpace) {\n const message = 'Comments must be separated from other tokens by white space characters';\n onError(token, 'MISSING_CHAR', message);\n }\n length += token.source.length;\n comment = token.source.substring(1);\n break;\n case 'error':\n onError(token, 'UNEXPECTED_TOKEN', token.message);\n length += token.source.length;\n break;\n /* istanbul ignore next should not happen */\n default: {\n const message = `Unexpected token in block scalar header: ${token.type}`;\n onError(token, 'UNEXPECTED_TOKEN', message);\n const ts = token.source;\n if (ts && typeof ts === 'string')\n length += ts.length;\n }\n }\n }\n return { mode, indent, chomp, comment, length };\n}\n/** @returns Array of lines split up as `[indent, content]` */\nfunction splitLines(source) {\n const split = source.split(/\\n( *)/);\n const first = split[0];\n const m = first.match(/^( *)/);\n const line0 = m?.[1]\n ? 
[m[1], first.slice(m[1].length)]\n : ['', first];\n const lines = [line0];\n for (let i = 1; i < split.length; i += 2)\n lines.push([split[i], split[i + 1]]);\n return lines;\n}\n\nexports.resolveBlockScalar = resolveBlockScalar;\n","'use strict';\n\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\n\nfunction resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {\n const seq = new YAMLSeq.YAMLSeq(ctx.schema);\n if (ctx.atRoot)\n ctx.atRoot = false;\n let offset = bs.offset;\n for (const { start, value } of bs.items) {\n const props = resolveProps.resolveProps(start, {\n indicator: 'seq-item-ind',\n next: value,\n offset,\n onError,\n startOnNewline: true\n });\n offset = props.end;\n if (!props.found) {\n if (props.anchor || props.tag || value) {\n if (value && value.type === 'block-seq')\n onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column');\n else\n onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');\n }\n else {\n // TODO: assert being at last item?\n if (props.comment)\n seq.comment = props.comment;\n continue;\n }\n }\n const node = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, offset, start, null, props, onError);\n if (ctx.schema.compat)\n utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);\n offset = node.range[2];\n seq.items.push(node);\n }\n seq.range = [bs.offset, offset, offset];\n return seq;\n}\n\nexports.resolveBlockSeq = resolveBlockSeq;\n","'use strict';\n\nfunction resolveEnd(end, offset, reqSpace, onError) {\n let comment = '';\n if (end) {\n let hasSpace = false;\n let sep = '';\n for (const token of end) {\n const { source, type } = token;\n switch (type) {\n case 'space':\n hasSpace = true;\n break;\n case 'comment': {\n if (reqSpace && !hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += sep + cb;\n sep = '';\n break;\n }\n case 'newline':\n if (comment)\n sep += source;\n hasSpace = true;\n break;\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);\n }\n offset += source.length;\n }\n }\n return { comment, offset };\n}\n\nexports.resolveEnd = resolveEnd;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst blockMsg = 'Block collections are not allowed within flow collections';\nconst isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');\nfunction resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {\n const isMap = fc.start.source === '{';\n const fcName = isMap ? 'flow map' : 'flow sequence';\n const coll = isMap\n ? 
new YAMLMap.YAMLMap(ctx.schema)\n : new YAMLSeq.YAMLSeq(ctx.schema);\n coll.flow = true;\n const atRoot = ctx.atRoot;\n if (atRoot)\n ctx.atRoot = false;\n let offset = fc.offset + fc.start.source.length;\n for (let i = 0; i < fc.items.length; ++i) {\n const collItem = fc.items[i];\n const { start, key, sep, value } = collItem;\n const props = resolveProps.resolveProps(start, {\n flow: fcName,\n indicator: 'explicit-key-ind',\n next: key ?? sep?.[0],\n offset,\n onError,\n startOnNewline: false\n });\n if (!props.found) {\n if (!props.anchor && !props.tag && !sep && !value) {\n if (i === 0 && props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n else if (i < fc.items.length - 1)\n onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);\n if (props.comment) {\n if (coll.comment)\n coll.comment += '\\n' + props.comment;\n else\n coll.comment = props.comment;\n }\n offset = props.end;\n continue;\n }\n if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))\n onError(key, // checked by containsNewline()\n 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n }\n if (i === 0) {\n if (props.comma)\n onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n }\n else {\n if (!props.comma)\n onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);\n if (props.comment) {\n let prevItemComment = '';\n loop: for (const st of start) {\n switch (st.type) {\n case 'comma':\n case 'space':\n break;\n case 'comment':\n prevItemComment = st.source.substring(1);\n break loop;\n default:\n break loop;\n }\n }\n if (prevItemComment) {\n let prev = coll.items[coll.items.length - 1];\n if (Node.isPair(prev))\n prev = prev.value ?? prev.key;\n if (prev.comment)\n prev.comment += '\\n' + prevItemComment;\n else\n prev.comment = prevItemComment;\n props.comment = props.comment.substring(prevItemComment.length + 1);\n }\n }\n }\n if (!isMap && !sep && !props.found) {\n // item is a value in a seq\n // → key & sep are empty, start does not include ? or :\n const valueNode = value\n ? composeNode(ctx, value, props, onError)\n : composeEmptyNode(ctx, props.end, sep, null, props, onError);\n coll.items.push(valueNode);\n offset = valueNode.range[2];\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else {\n // item is a key+value pair\n // key value\n const keyStart = props.end;\n const keyNode = key\n ? composeNode(ctx, key, props, onError)\n : composeEmptyNode(ctx, keyStart, start, null, props, onError);\n if (isBlock(key))\n onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);\n // value properties\n const valueProps = resolveProps.resolveProps(sep ?? 
[], {\n flow: fcName,\n indicator: 'map-value-ind',\n next: value,\n offset: keyNode.range[2],\n onError,\n startOnNewline: false\n });\n if (valueProps.found) {\n if (!isMap && !props.found && ctx.options.strict) {\n if (sep)\n for (const st of sep) {\n if (st === valueProps.found)\n break;\n if (st.type === 'newline') {\n onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n break;\n }\n }\n if (props.start < valueProps.found.offset - 1024)\n onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');\n }\n }\n else if (value) {\n if ('source' in value && value.source && value.source[0] === ':')\n onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);\n else\n onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);\n }\n // value value\n const valueNode = value\n ? composeNode(ctx, value, valueProps, onError)\n : valueProps.found\n ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)\n : null;\n if (valueNode) {\n if (isBlock(value))\n onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n }\n else if (valueProps.comment) {\n if (keyNode.comment)\n keyNode.comment += '\\n' + valueProps.comment;\n else\n keyNode.comment = valueProps.comment;\n }\n const pair = new Pair.Pair(keyNode, valueNode);\n if (ctx.options.keepSourceTokens)\n pair.srcToken = collItem;\n if (isMap) {\n const map = coll;\n if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n map.items.push(pair);\n }\n else {\n const map = new YAMLMap.YAMLMap(ctx.schema);\n map.flow = true;\n map.items.push(pair);\n coll.items.push(map);\n }\n offset = valueNode ? valueNode.range[2] : valueProps.end;\n }\n }\n const expectedEnd = isMap ? '}' : ']';\n const [ce, ...ee] = fc.end;\n let cePos = offset;\n if (ce && ce.source === expectedEnd)\n cePos = ce.offset + ce.source.length;\n else {\n const name = fcName[0].toUpperCase() + fcName.substring(1);\n const msg = atRoot\n ? `${name} must end with a ${expectedEnd}`\n : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;\n onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg);\n if (ce && ce.source.length !== 1)\n ee.unshift(ce);\n }\n if (ee.length > 0) {\n const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);\n if (end.comment) {\n if (coll.comment)\n coll.comment += '\\n' + end.comment;\n else\n coll.comment = end.comment;\n }\n coll.range = [fc.offset, cePos, end.offset];\n }\n else {\n coll.range = [fc.offset, cePos, cePos];\n }\n return coll;\n}\n\nexports.resolveFlowCollection = resolveFlowCollection;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction resolveFlowScalar(scalar, strict, onError) {\n const { offset, type, source, end } = scalar;\n let _type;\n let value;\n const _onError = (rel, code, msg) => onError(offset + rel, code, msg);\n switch (type) {\n case 'scalar':\n _type = Scalar.Scalar.PLAIN;\n value = plainValue(source, _onError);\n break;\n case 'single-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_SINGLE;\n value = singleQuotedValue(source, _onError);\n break;\n case 'double-quoted-scalar':\n _type = Scalar.Scalar.QUOTE_DOUBLE;\n value = doubleQuotedValue(source, _onError);\n break;\n /* istanbul ignore next should not happen */\n default:\n onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);\n return {\n value: '',\n type: null,\n comment: '',\n range: [offset, offset + source.length, offset + source.length]\n };\n }\n const valueEnd = offset + source.length;\n const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);\n return {\n value,\n type: _type,\n comment: re.comment,\n range: [offset, valueEnd, re.offset]\n };\n}\nfunction plainValue(source, onError) {\n let badChar = '';\n switch (source[0]) {\n /* istanbul ignore next should not happen */\n case '\\t':\n badChar = 'a tab character';\n break;\n case ',':\n badChar = 'flow indicator character ,';\n break;\n case '%':\n badChar = 'directive indicator character %';\n break;\n case '|':\n case '>': {\n badChar = `block scalar indicator ${source[0]}`;\n break;\n }\n case '@':\n case '`': {\n badChar = `reserved character ${source[0]}`;\n break;\n }\n }\n if (badChar)\n onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);\n return foldLines(source);\n}\nfunction singleQuotedValue(source, onError) {\n if (source[source.length - 1] !== \"'\" || source.length === 1)\n onError(source.length, 'MISSING_CHAR', \"Missing closing 'quote\");\n return foldLines(source.slice(1, -1)).replace(/''/g, \"'\");\n}\nfunction foldLines(source) {\n /**\n * The negative lookbehind here and in the `re` RegExp is to\n * prevent causing a polynomial search time in certain cases.\n *\n * The try-catch is for Safari, which doesn't support this yet:\n * https://caniuse.com/js-regexp-lookbehind\n */\n let first, line;\n try {\n first = new RegExp('(.*?)(? wsStart ? 
source.slice(wsStart, i + 1) : ch;\n }\n else {\n res += ch;\n }\n }\n if (source[source.length - 1] !== '\"' || source.length === 1)\n onError(source.length, 'MISSING_CHAR', 'Missing closing \"quote');\n return res;\n}\n/**\n * Fold a single newline into a space, multiple newlines to N - 1 newlines.\n * Presumes `source[offset] === '\\n'`\n */\nfunction foldNewline(source, offset) {\n let fold = '';\n let ch = source[offset + 1];\n while (ch === ' ' || ch === '\\t' || ch === '\\n' || ch === '\\r') {\n if (ch === '\\r' && source[offset + 2] !== '\\n')\n break;\n if (ch === '\\n')\n fold += '\\n';\n offset += 1;\n ch = source[offset + 1];\n }\n if (!fold)\n fold = ' ';\n return { fold, offset };\n}\nconst escapeCodes = {\n '0': '\\0',\n a: '\\x07',\n b: '\\b',\n e: '\\x1b',\n f: '\\f',\n n: '\\n',\n r: '\\r',\n t: '\\t',\n v: '\\v',\n N: '\\u0085',\n _: '\\u00a0',\n L: '\\u2028',\n P: '\\u2029',\n ' ': ' ',\n '\"': '\"',\n '/': '/',\n '\\\\': '\\\\',\n '\\t': '\\t'\n};\nfunction parseCharCode(source, offset, length, onError) {\n const cc = source.substr(offset, length);\n const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);\n const code = ok ? parseInt(cc, 16) : NaN;\n if (isNaN(code)) {\n const raw = source.substr(offset - 2, length + 2);\n onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);\n return raw;\n }\n return String.fromCodePoint(code);\n}\n\nexports.resolveFlowScalar = resolveFlowScalar;\n","'use strict';\n\nfunction resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {\n let spaceBefore = false;\n let atNewline = startOnNewline;\n let hasSpace = startOnNewline;\n let comment = '';\n let commentSep = '';\n let hasNewline = false;\n let hasNewlineAfterProp = false;\n let reqSpace = false;\n let anchor = null;\n let tag = null;\n let comma = null;\n let found = null;\n let start = null;\n for (const token of tokens) {\n if (reqSpace) {\n if (token.type !== 'space' &&\n token.type !== 'newline' &&\n token.type !== 'comma')\n onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n reqSpace = false;\n }\n switch (token.type) {\n case 'space':\n // At the doc level, tabs at line start may be parsed\n // as leading white space rather than indentation.\n // In a flow collection, only the parser handles indent.\n if (!flow &&\n atNewline &&\n indicator !== 'doc-start' &&\n token.source[0] === '\\t')\n onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');\n hasSpace = true;\n break;\n case 'comment': {\n if (!hasSpace)\n onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n const cb = token.source.substring(1) || ' ';\n if (!comment)\n comment = cb;\n else\n comment += commentSep + cb;\n commentSep = '';\n atNewline = false;\n break;\n }\n case 'newline':\n if (atNewline) {\n if (comment)\n comment += token.source;\n else\n spaceBefore = true;\n }\n else\n commentSep += token.source;\n atNewline = true;\n hasNewline = true;\n if (anchor || tag)\n hasNewlineAfterProp = true;\n hasSpace = true;\n break;\n case 'anchor':\n if (anchor)\n onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');\n if (token.source.endsWith(':'))\n onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);\n anchor = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n case 'tag': {\n if (tag)\n 
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');\n tag = token;\n if (start === null)\n start = token.offset;\n atNewline = false;\n hasSpace = false;\n reqSpace = true;\n break;\n }\n case indicator:\n // Could here handle preceding comments differently\n if (anchor || tag)\n onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);\n if (found)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);\n found = token;\n atNewline = false;\n hasSpace = false;\n break;\n case 'comma':\n if (flow) {\n if (comma)\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);\n comma = token;\n atNewline = false;\n hasSpace = false;\n break;\n }\n // else fallthrough\n default:\n onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);\n atNewline = false;\n hasSpace = false;\n }\n }\n const last = tokens[tokens.length - 1];\n const end = last ? last.offset + last.source.length : offset;\n if (reqSpace &&\n next &&\n next.type !== 'space' &&\n next.type !== 'newline' &&\n next.type !== 'comma' &&\n (next.type !== 'scalar' || next.source !== ''))\n onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n return {\n comma,\n found,\n spaceBefore,\n comment,\n hasNewline,\n hasNewlineAfterProp,\n anchor,\n tag,\n end,\n start: start ?? end\n };\n}\n\nexports.resolveProps = resolveProps;\n","'use strict';\n\nfunction containsNewline(key) {\n if (!key)\n return null;\n switch (key.type) {\n case 'alias':\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n if (key.source.includes('\\n'))\n return true;\n if (key.end)\n for (const st of key.end)\n if (st.type === 'newline')\n return true;\n return false;\n case 'flow-collection':\n for (const it of key.items) {\n for (const st of it.start)\n if (st.type === 'newline')\n return true;\n if (it.sep)\n for (const st of it.sep)\n if (st.type === 'newline')\n return true;\n if (containsNewline(it.key) || containsNewline(it.value))\n return true;\n }\n return false;\n default:\n return true;\n }\n}\n\nexports.containsNewline = containsNewline;\n","'use strict';\n\nfunction emptyScalarPosition(offset, before, pos) {\n if (before) {\n if (pos === null)\n pos = before.length;\n for (let i = pos - 1; i >= 0; --i) {\n let st = before[i];\n switch (st.type) {\n case 'space':\n case 'comment':\n case 'newline':\n offset -= st.source.length;\n continue;\n }\n // Technically, an empty scalar is immediately after the last non-empty\n // node, but it's more useful to place it after any whitespace.\n st = before[++i];\n while (st?.type === 'space') {\n offset += st.source.length;\n st = before[++i];\n }\n break;\n }\n }\n return offset;\n}\n\nexports.emptyScalarPosition = emptyScalarPosition;\n","'use strict';\n\nvar utilContainsNewline = require('./util-contains-newline.js');\n\nfunction flowIndentCheck(indent, fc, onError) {\n if (fc?.type === 'flow-collection') {\n const end = fc.end[0];\n if (end.indent === indent &&\n (end.source === ']' || end.source === '}') &&\n utilContainsNewline.containsNewline(fc)) {\n const msg = 'Flow end indicator should be more indented than parent';\n onError(end, 'BAD_INDENT', msg, true);\n }\n }\n}\n\nexports.flowIndentCheck = flowIndentCheck;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\n\nfunction mapIncludes(ctx, items, search) {\n const { uniqueKeys } = ctx.options;\n if (uniqueKeys === false)\n return false;\n const 
isEqual = typeof uniqueKeys === 'function'\n ? uniqueKeys\n : (a, b) => a === b ||\n (Node.isScalar(a) &&\n Node.isScalar(b) &&\n a.value === b.value &&\n !(a.value === '<<' && ctx.schema.merge));\n return items.some(pair => isEqual(pair.key, search));\n}\n\nexports.mapIncludes = mapIncludes;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar Pair = require('../nodes/Pair.js');\nvar toJS = require('../nodes/toJS.js');\nvar Schema = require('../schema/Schema.js');\nvar stringify = require('../stringify/stringify.js');\nvar stringifyDocument = require('../stringify/stringifyDocument.js');\nvar anchors = require('./anchors.js');\nvar applyReviver = require('./applyReviver.js');\nvar createNode = require('./createNode.js');\nvar directives = require('./directives.js');\n\nclass Document {\n constructor(value, replacer, options) {\n /** A comment before this Document */\n this.commentBefore = null;\n /** A comment immediately after this Document */\n this.comment = null;\n /** Errors encountered during parsing. */\n this.errors = [];\n /** Warnings encountered during parsing. */\n this.warnings = [];\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC });\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const opt = Object.assign({\n intAsBigInt: false,\n keepSourceTokens: false,\n logLevel: 'warn',\n prettyErrors: true,\n strict: true,\n uniqueKeys: true,\n version: '1.2'\n }, options);\n this.options = opt;\n let { version } = opt;\n if (options?._directives) {\n this.directives = options._directives.atDocument();\n if (this.directives.yaml.explicit)\n version = this.directives.yaml.version;\n }\n else\n this.directives = new directives.Directives({ version });\n this.setSchema(version, options);\n if (value === undefined)\n this.contents = null;\n else {\n this.contents = this.createNode(value, _replacer, options);\n }\n }\n /**\n * Create a deep copy of this Document and its contents.\n *\n * Custom Node values that inherit from `Object` still refer to their original instances.\n */\n clone() {\n const copy = Object.create(Document.prototype, {\n [Node.NODE_TYPE]: { value: Node.DOC }\n });\n copy.commentBefore = this.commentBefore;\n copy.comment = this.comment;\n copy.errors = this.errors.slice();\n copy.warnings = this.warnings.slice();\n copy.options = Object.assign({}, this.options);\n if (this.directives)\n copy.directives = this.directives.clone();\n copy.schema = this.schema.clone();\n copy.contents = Node.isNode(this.contents)\n ? this.contents.clone(copy.schema)\n : this.contents;\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /** Adds a value to the document. */\n add(value) {\n if (assertCollection(this.contents))\n this.contents.add(value);\n }\n /** Adds a value to the document. 
*/\n addIn(path, value) {\n if (assertCollection(this.contents))\n this.contents.addIn(path, value);\n }\n /**\n * Create a new `Alias` node, ensuring that the target `node` has the required anchor.\n *\n * If `node` already has an anchor, `name` is ignored.\n * Otherwise, the `node.anchor` value will be set to `name`,\n * or if an anchor with that name is already present in the document,\n * `name` will be used as a prefix for a new unique anchor.\n * If `name` is undefined, the generated anchor will use 'a' as a prefix.\n */\n createAlias(node, name) {\n if (!node.anchor) {\n const prev = anchors.anchorNames(this);\n node.anchor =\n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;\n }\n return new Alias.Alias(node.anchor);\n }\n createNode(value, replacer, options) {\n let _replacer = undefined;\n if (typeof replacer === 'function') {\n value = replacer.call({ '': value }, '', value);\n _replacer = replacer;\n }\n else if (Array.isArray(replacer)) {\n const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;\n const asStr = replacer.filter(keyToStr).map(String);\n if (asStr.length > 0)\n replacer = replacer.concat(asStr);\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n replacer = undefined;\n }\n const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};\n const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, \n // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n anchorPrefix || 'a');\n const ctx = {\n aliasDuplicateObjects: aliasDuplicateObjects ?? true,\n keepUndefined: keepUndefined ?? false,\n onAnchor,\n onTagObj,\n replacer: _replacer,\n schema: this.schema,\n sourceObjects\n };\n const node = createNode.createNode(value, tag, ctx);\n if (flow && Node.isCollection(node))\n node.flow = true;\n setAnchors();\n return node;\n }\n /**\n * Convert a key and a value into a `Pair` using the current schema,\n * recursively wrapping all values as `Scalar` or `Collection` nodes.\n */\n createPair(key, value, options = {}) {\n const k = this.createNode(key, null, options);\n const v = this.createNode(value, null, options);\n return new Pair.Pair(k, v);\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n return assertCollection(this.contents) ? this.contents.delete(key) : false;\n }\n /**\n * Removes a value from the document.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n if (Collection.isEmptyPath(path)) {\n if (this.contents == null)\n return false;\n this.contents = null;\n return true;\n }\n return assertCollection(this.contents)\n ? this.contents.deleteIn(path)\n : false;\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n get(key, keepScalar) {\n return Node.isCollection(this.contents)\n ? this.contents.get(key, keepScalar)\n : undefined;\n }\n /**\n * Returns item at `path`, or `undefined` if not found. 
By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n if (Collection.isEmptyPath(path))\n return !keepScalar && Node.isScalar(this.contents)\n ? this.contents.value\n : this.contents;\n return Node.isCollection(this.contents)\n ? this.contents.getIn(path, keepScalar)\n : undefined;\n }\n /**\n * Checks if the document includes a value with the key `key`.\n */\n has(key) {\n return Node.isCollection(this.contents) ? this.contents.has(key) : false;\n }\n /**\n * Checks if the document includes a value at `path`.\n */\n hasIn(path) {\n if (Collection.isEmptyPath(path))\n return this.contents !== undefined;\n return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false;\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n set(key, value) {\n if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, [key], value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.set(key, value);\n }\n }\n /**\n * Sets a value in this document. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n if (Collection.isEmptyPath(path))\n this.contents = value;\n else if (this.contents == null) {\n this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);\n }\n else if (assertCollection(this.contents)) {\n this.contents.setIn(path, value);\n }\n }\n /**\n * Change the YAML version and schema used by the document.\n * A `null` version disables support for directives, explicit tags, anchors, and aliases.\n * It also requires the `schema` option to be given as a `Schema` instance value.\n *\n * Overrides all previously set schema options.\n */\n setSchema(version, options = {}) {\n if (typeof version === 'number')\n version = String(version);\n let opt;\n switch (version) {\n case '1.1':\n if (this.directives)\n this.directives.yaml.version = '1.1';\n else\n this.directives = new directives.Directives({ version: '1.1' });\n opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };\n break;\n case '1.2':\n case 'next':\n if (this.directives)\n this.directives.yaml.version = version;\n else\n this.directives = new directives.Directives({ version });\n opt = { merge: false, resolveKnownTags: true, schema: 'core' };\n break;\n case null:\n if (this.directives)\n delete this.directives;\n opt = null;\n break;\n default: {\n const sv = JSON.stringify(version);\n throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);\n }\n }\n // Not using `instanceof Schema` to allow for duck typing\n if (options.schema instanceof Object)\n this.schema = options.schema;\n else if (opt)\n this.schema = new Schema.Schema(Object.assign(opt, options));\n else\n throw new Error(`With a null YAML version, the { schema: Schema } option is required`);\n }\n // json & jsonArg are only used from toJSON()\n toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n const ctx = {\n anchors: new Map(),\n doc: this,\n keep: !json,\n mapAsMap: mapAsMap === true,\n mapKeyWarned: false,\n maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,\n stringify: stringify.stringify\n };\n const res = toJS.toJS(this.contents, jsonArg ?? 
'', ctx);\n if (typeof onAnchor === 'function')\n for (const { count, res } of ctx.anchors.values())\n onAnchor(res, count);\n return typeof reviver === 'function'\n ? applyReviver.applyReviver(reviver, { '': res }, '', res)\n : res;\n }\n /**\n * A JSON representation of the document `contents`.\n *\n * @param jsonArg Used by `JSON.stringify` to indicate the array index or\n * property name.\n */\n toJSON(jsonArg, onAnchor) {\n return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });\n }\n /** A YAML representation of the document. */\n toString(options = {}) {\n if (this.errors.length > 0)\n throw new Error('Document with errors cannot be stringified');\n if ('indent' in options &&\n (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {\n const s = JSON.stringify(options.indent);\n throw new Error(`\"indent\" option must be a positive integer, not ${s}`);\n }\n return stringifyDocument.stringifyDocument(this, options);\n }\n}\nfunction assertCollection(contents) {\n if (Node.isCollection(contents))\n return true;\n throw new Error('Expected a YAML collection as document contents');\n}\n\nexports.Document = Document;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\n/**\n * Verify that the input string is a valid anchor.\n *\n * Will throw on errors.\n */\nfunction anchorIsValid(anchor) {\n if (/[\\x00-\\x19\\s,[\\]{}]/.test(anchor)) {\n const sa = JSON.stringify(anchor);\n const msg = `Anchor must not contain whitespace or control characters: ${sa}`;\n throw new Error(msg);\n }\n return true;\n}\nfunction anchorNames(root) {\n const anchors = new Set();\n visit.visit(root, {\n Value(_key, node) {\n if (node.anchor)\n anchors.add(node.anchor);\n }\n });\n return anchors;\n}\n/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */\nfunction findNewAnchor(prefix, exclude) {\n for (let i = 1; true; ++i) {\n const name = `${prefix}${i}`;\n if (!exclude.has(name))\n return name;\n }\n}\nfunction createNodeAnchors(doc, prefix) {\n const aliasObjects = [];\n const sourceObjects = new Map();\n let prevAnchors = null;\n return {\n onAnchor: (source) => {\n aliasObjects.push(source);\n if (!prevAnchors)\n prevAnchors = anchorNames(doc);\n const anchor = findNewAnchor(prefix, prevAnchors);\n prevAnchors.add(anchor);\n return anchor;\n },\n /**\n * With circular references, the source node is only resolved after all\n * of its child nodes are. 
This is why anchors are set only after all of\n * the nodes have been created.\n */\n setAnchors: () => {\n for (const source of aliasObjects) {\n const ref = sourceObjects.get(source);\n if (typeof ref === 'object' &&\n ref.anchor &&\n (Node.isScalar(ref.node) || Node.isCollection(ref.node))) {\n ref.node.anchor = ref.anchor;\n }\n else {\n const error = new Error('Failed to resolve repeated object (this should not happen)');\n error.source = source;\n throw error;\n }\n }\n },\n sourceObjects\n };\n}\n\nexports.anchorIsValid = anchorIsValid;\nexports.anchorNames = anchorNames;\nexports.createNodeAnchors = createNodeAnchors;\nexports.findNewAnchor = findNewAnchor;\n","'use strict';\n\n/**\n * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,\n * in section 24.5.1.1 \"Runtime Semantics: InternalizeJSONProperty\" of the\n * 2021 edition: https://tc39.es/ecma262/#sec-json.parse\n *\n * Includes extensions for handling Map and Set objects.\n */\nfunction applyReviver(reviver, obj, key, val) {\n if (val && typeof val === 'object') {\n if (Array.isArray(val)) {\n for (let i = 0, len = val.length; i < len; ++i) {\n const v0 = val[i];\n const v1 = applyReviver(reviver, val, String(i), v0);\n if (v1 === undefined)\n delete val[i];\n else if (v1 !== v0)\n val[i] = v1;\n }\n }\n else if (val instanceof Map) {\n for (const k of Array.from(val.keys())) {\n const v0 = val.get(k);\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n val.delete(k);\n else if (v1 !== v0)\n val.set(k, v1);\n }\n }\n else if (val instanceof Set) {\n for (const v0 of Array.from(val)) {\n const v1 = applyReviver(reviver, val, v0, v0);\n if (v1 === undefined)\n val.delete(v0);\n else if (v1 !== v0) {\n val.delete(v0);\n val.add(v1);\n }\n }\n }\n else {\n for (const [k, v0] of Object.entries(val)) {\n const v1 = applyReviver(reviver, val, k, v0);\n if (v1 === undefined)\n delete val[k];\n else if (v1 !== v0)\n val[k] = v1;\n }\n }\n }\n return reviver.call(obj, key, val);\n}\n\nexports.applyReviver = applyReviver;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\n\nconst defaultTagPrefix = 'tag:yaml.org,2002:';\nfunction findTagObject(value, tagName, tags) {\n if (tagName) {\n const match = tags.filter(t => t.tag === tagName);\n const tagObj = match.find(t => !t.format) ?? match[0];\n if (!tagObj)\n throw new Error(`Tag ${tagName} not found`);\n return tagObj;\n }\n return tags.find(t => t.identify?.(value) && !t.format);\n}\nfunction createNode(value, tagName, ctx) {\n if (Node.isDocument(value))\n value = value.contents;\n if (Node.isNode(value))\n return value;\n if (Node.isPair(value)) {\n const map = ctx.schema[Node.MAP].createNode?.(ctx.schema, null, ctx);\n map.items.push(value);\n return map;\n }\n if (value instanceof String ||\n value instanceof Number ||\n value instanceof Boolean ||\n (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere\n ) {\n // https://tc39.es/ecma262/#sec-serializejsonproperty\n value = value.valueOf();\n }\n const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;\n // Detect duplicate references to the same object & use Alias nodes for all\n // after first. 
The `ref` wrapper allows for circular references to resolve.\n let ref = undefined;\n if (aliasDuplicateObjects && value && typeof value === 'object') {\n ref = sourceObjects.get(value);\n if (ref) {\n if (!ref.anchor)\n ref.anchor = onAnchor(value);\n return new Alias.Alias(ref.anchor);\n }\n else {\n ref = { anchor: null, node: null };\n sourceObjects.set(value, ref);\n }\n }\n if (tagName?.startsWith('!!'))\n tagName = defaultTagPrefix + tagName.slice(2);\n let tagObj = findTagObject(value, tagName, schema.tags);\n if (!tagObj) {\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n value = value.toJSON();\n }\n if (!value || typeof value !== 'object') {\n const node = new Scalar.Scalar(value);\n if (ref)\n ref.node = node;\n return node;\n }\n tagObj =\n value instanceof Map\n ? schema[Node.MAP]\n : Symbol.iterator in Object(value)\n ? schema[Node.SEQ]\n : schema[Node.MAP];\n }\n if (onTagObj) {\n onTagObj(tagObj);\n delete ctx.onTagObj;\n }\n const node = tagObj?.createNode\n ? tagObj.createNode(ctx.schema, value, ctx)\n : new Scalar.Scalar(value);\n if (tagName)\n node.tag = tagName;\n if (ref)\n ref.node = node;\n return node;\n}\n\nexports.createNode = createNode;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar visit = require('../visit.js');\n\nconst escapeChars = {\n '!': '%21',\n ',': '%2C',\n '[': '%5B',\n ']': '%5D',\n '{': '%7B',\n '}': '%7D'\n};\nconst escapeTagName = (tn) => tn.replace(/[!,[\\]{}]/g, ch => escapeChars[ch]);\nclass Directives {\n constructor(yaml, tags) {\n /**\n * The directives-end/doc-start marker `---`. If `null`, a marker may still be\n * included in the document's stringified representation.\n */\n this.docStart = null;\n /** The doc-end marker `...`. 
*/\n this.docEnd = false;\n this.yaml = Object.assign({}, Directives.defaultYaml, yaml);\n this.tags = Object.assign({}, Directives.defaultTags, tags);\n }\n clone() {\n const copy = new Directives(this.yaml, this.tags);\n copy.docStart = this.docStart;\n return copy;\n }\n /**\n * During parsing, get a Directives instance for the current document and\n * update the stream state according to the current version's spec.\n */\n atDocument() {\n const res = new Directives(this.yaml, this.tags);\n switch (this.yaml.version) {\n case '1.1':\n this.atNextDocument = true;\n break;\n case '1.2':\n this.atNextDocument = false;\n this.yaml = {\n explicit: Directives.defaultYaml.explicit,\n version: '1.2'\n };\n this.tags = Object.assign({}, Directives.defaultTags);\n break;\n }\n return res;\n }\n /**\n * @param onError - May be called even if the action was successful\n * @returns `true` on success\n */\n add(line, onError) {\n if (this.atNextDocument) {\n this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };\n this.tags = Object.assign({}, Directives.defaultTags);\n this.atNextDocument = false;\n }\n const parts = line.trim().split(/[ \\t]+/);\n const name = parts.shift();\n switch (name) {\n case '%TAG': {\n if (parts.length !== 2) {\n onError(0, '%TAG directive should contain exactly two parts');\n if (parts.length < 2)\n return false;\n }\n const [handle, prefix] = parts;\n this.tags[handle] = prefix;\n return true;\n }\n case '%YAML': {\n this.yaml.explicit = true;\n if (parts.length !== 1) {\n onError(0, '%YAML directive should contain exactly one part');\n return false;\n }\n const [version] = parts;\n if (version === '1.1' || version === '1.2') {\n this.yaml.version = version;\n return true;\n }\n else {\n const isValid = /^\\d+\\.\\d+$/.test(version);\n onError(6, `Unsupported YAML version ${version}`, isValid);\n return false;\n }\n }\n default:\n onError(0, `Unknown directive ${name}`, true);\n return false;\n }\n }\n /**\n * Resolves a tag, matching handles to those defined in %TAG directives.\n *\n * @returns Resolved tag, which may also be the non-specific tag `'!'` or a\n * `'!local'` tag, or `null` if unresolvable.\n */\n tagName(source, onError) {\n if (source === '!')\n return '!'; // non-specific tag\n if (source[0] !== '!') {\n onError(`Not a valid tag: ${source}`);\n return null;\n }\n if (source[1] === '<') {\n const verbatim = source.slice(2, -1);\n if (verbatim === '!' || verbatim === '!!') {\n onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);\n return null;\n }\n if (source[source.length - 1] !== '>')\n onError('Verbatim tags must end with a >');\n return verbatim;\n }\n const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);\n if (!suffix)\n onError(`The ${source} tag has no suffix`);\n const prefix = this.tags[handle];\n if (prefix)\n return prefix + decodeURIComponent(suffix);\n if (handle === '!')\n return source; // local tag\n onError(`Could not resolve tag: ${source}`);\n return null;\n }\n /**\n * Given a fully resolved tag, returns its printable string form,\n * taking into account current tag prefixes and defaults.\n */\n tagString(tag) {\n for (const [handle, prefix] of Object.entries(this.tags)) {\n if (tag.startsWith(prefix))\n return handle + escapeTagName(tag.substring(prefix.length));\n }\n return tag[0] === '!' ? tag : `!<${tag}>`;\n }\n toString(doc) {\n const lines = this.yaml.explicit\n ? 
[`%YAML ${this.yaml.version || '1.2'}`]\n : [];\n const tagEntries = Object.entries(this.tags);\n let tagNames;\n if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) {\n const tags = {};\n visit.visit(doc.contents, (_key, node) => {\n if (Node.isNode(node) && node.tag)\n tags[node.tag] = true;\n });\n tagNames = Object.keys(tags);\n }\n else\n tagNames = [];\n for (const [handle, prefix] of tagEntries) {\n if (handle === '!!' && prefix === 'tag:yaml.org,2002:')\n continue;\n if (!doc || tagNames.some(tn => tn.startsWith(prefix)))\n lines.push(`%TAG ${handle} ${prefix}`);\n }\n return lines.join('\\n');\n }\n}\nDirectives.defaultYaml = { explicit: false, version: '1.2' };\nDirectives.defaultTags = { '!!': 'tag:yaml.org,2002:' };\n\nexports.Directives = Directives;\n","'use strict';\n\nclass YAMLError extends Error {\n constructor(name, pos, code, message) {\n super();\n this.name = name;\n this.code = code;\n this.message = message;\n this.pos = pos;\n }\n}\nclass YAMLParseError extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLParseError', pos, code, message);\n }\n}\nclass YAMLWarning extends YAMLError {\n constructor(pos, code, message) {\n super('YAMLWarning', pos, code, message);\n }\n}\nconst prettifyError = (src, lc) => (error) => {\n if (error.pos[0] === -1)\n return;\n error.linePos = error.pos.map(pos => lc.linePos(pos));\n const { line, col } = error.linePos[0];\n error.message += ` at line ${line}, column ${col}`;\n let ci = col - 1;\n let lineStr = src\n .substring(lc.lineStarts[line - 1], lc.lineStarts[line])\n .replace(/[\\n\\r]+$/, '');\n // Trim to max 80 chars, keeping col position near the middle\n if (ci >= 60 && lineStr.length > 80) {\n const trimStart = Math.min(ci - 39, lineStr.length - 79);\n lineStr = '…' + lineStr.substring(trimStart);\n ci -= trimStart - 1;\n }\n if (lineStr.length > 80)\n lineStr = lineStr.substring(0, 79) + '…';\n // Include previous line in context if pointing at line start\n if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {\n // Regexp won't match if start is trimmed\n let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);\n if (prev.length > 80)\n prev = prev.substring(0, 79) + '…\\n';\n lineStr = prev + lineStr;\n }\n if (/[^ ]/.test(lineStr)) {\n let count = 1;\n const end = error.linePos[1];\n if (end && end.line === line && end.col > col) {\n count = Math.min(end.col - col, 80 - ci);\n }\n const pointer = ' '.repeat(ci) + '^'.repeat(count);\n error.message += `:\\n\\n${lineStr}\\n${pointer}\\n`;\n }\n};\n\nexports.YAMLError = YAMLError;\nexports.YAMLParseError = YAMLParseError;\nexports.YAMLWarning = YAMLWarning;\nexports.prettifyError = prettifyError;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar Schema = require('./schema/Schema.js');\nvar errors = require('./errors.js');\nvar Alias = require('./nodes/Alias.js');\nvar Node = require('./nodes/Node.js');\nvar Pair = require('./nodes/Pair.js');\nvar Scalar = require('./nodes/Scalar.js');\nvar YAMLMap = require('./nodes/YAMLMap.js');\nvar YAMLSeq = require('./nodes/YAMLSeq.js');\nvar cst = require('./parse/cst.js');\nvar lexer = require('./parse/lexer.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\nvar publicApi = require('./public-api.js');\nvar visit = require('./visit.js');\n\n\n\nexports.Composer = composer.Composer;\nexports.Document = Document.Document;\nexports.Schema = 
Schema.Schema;\nexports.YAMLError = errors.YAMLError;\nexports.YAMLParseError = errors.YAMLParseError;\nexports.YAMLWarning = errors.YAMLWarning;\nexports.Alias = Alias.Alias;\nexports.isAlias = Node.isAlias;\nexports.isCollection = Node.isCollection;\nexports.isDocument = Node.isDocument;\nexports.isMap = Node.isMap;\nexports.isNode = Node.isNode;\nexports.isPair = Node.isPair;\nexports.isScalar = Node.isScalar;\nexports.isSeq = Node.isSeq;\nexports.Pair = Pair.Pair;\nexports.Scalar = Scalar.Scalar;\nexports.YAMLMap = YAMLMap.YAMLMap;\nexports.YAMLSeq = YAMLSeq.YAMLSeq;\nexports.CST = cst;\nexports.Lexer = lexer.Lexer;\nexports.LineCounter = lineCounter.LineCounter;\nexports.Parser = parser.Parser;\nexports.parse = publicApi.parse;\nexports.parseAllDocuments = publicApi.parseAllDocuments;\nexports.parseDocument = publicApi.parseDocument;\nexports.stringify = publicApi.stringify;\nexports.visit = visit.visit;\nexports.visitAsync = visit.visitAsync;\n","'use strict';\n\nfunction debug(logLevel, ...messages) {\n if (logLevel === 'debug')\n console.log(...messages);\n}\nfunction warn(logLevel, warning) {\n if (logLevel === 'debug' || logLevel === 'warn') {\n if (typeof process !== 'undefined' && process.emitWarning)\n process.emitWarning(warning);\n else\n console.warn(warning);\n }\n}\n\nexports.debug = debug;\nexports.warn = warn;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar visit = require('../visit.js');\nvar Node = require('./Node.js');\n\nclass Alias extends Node.NodeBase {\n constructor(source) {\n super(Node.ALIAS);\n this.source = source;\n Object.defineProperty(this, 'tag', {\n set() {\n throw new Error('Alias nodes cannot have tags');\n }\n });\n }\n /**\n * Resolve the value of this alias within `doc`, finding the last\n * instance of the `source` anchor before this node.\n */\n resolve(doc) {\n let found = undefined;\n visit.visit(doc, {\n Node: (_key, node) => {\n if (node === this)\n return visit.visit.BREAK;\n if (node.anchor === this.source)\n found = node;\n }\n });\n return found;\n }\n toJSON(_arg, ctx) {\n if (!ctx)\n return { source: this.source };\n const { anchors, doc, maxAliasCount } = ctx;\n const source = this.resolve(doc);\n if (!source) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new ReferenceError(msg);\n }\n const data = anchors.get(source);\n /* istanbul ignore if */\n if (!data || data.res === undefined) {\n const msg = 'This should not happen: Alias anchor was not resolved?';\n throw new ReferenceError(msg);\n }\n if (maxAliasCount >= 0) {\n data.count += 1;\n if (data.aliasCount === 0)\n data.aliasCount = getAliasCount(doc, source, anchors);\n if (data.count * data.aliasCount > maxAliasCount) {\n const msg = 'Excessive alias count indicates a resource exhaustion attack';\n throw new ReferenceError(msg);\n }\n }\n return data.res;\n }\n toString(ctx, _onComment, _onChompKeep) {\n const src = `*${this.source}`;\n if (ctx) {\n anchors.anchorIsValid(this.source);\n if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {\n const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n throw new Error(msg);\n }\n if (ctx.implicitKey)\n return `${src} `;\n }\n return src;\n }\n}\nfunction getAliasCount(doc, node, anchors) {\n if (Node.isAlias(node)) {\n const source = node.resolve(doc);\n const anchor = anchors && source && anchors.get(source);\n return anchor ? 
anchor.count * anchor.aliasCount : 0;\n }\n else if (Node.isCollection(node)) {\n let count = 0;\n for (const item of node.items) {\n const c = getAliasCount(doc, item, anchors);\n if (c > count)\n count = c;\n }\n return count;\n }\n else if (Node.isPair(node)) {\n const kc = getAliasCount(doc, node.key, anchors);\n const vc = getAliasCount(doc, node.value, anchors);\n return Math.max(kc, vc);\n }\n return 1;\n}\n\nexports.Alias = Alias;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar Node = require('./Node.js');\n\nfunction collectionFromPath(schema, path, value) {\n let v = value;\n for (let i = path.length - 1; i >= 0; --i) {\n const k = path[i];\n if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {\n const a = [];\n a[k] = v;\n v = a;\n }\n else {\n v = new Map([[k, v]]);\n }\n }\n return createNode.createNode(v, undefined, {\n aliasDuplicateObjects: false,\n keepUndefined: false,\n onAnchor: () => {\n throw new Error('This should not happen, please report a bug.');\n },\n schema,\n sourceObjects: new Map()\n });\n}\n// Type guard is intentionally a little wrong so as to be more useful,\n// as it does not cover untypable empty non-string iterables (e.g. []).\nconst isEmptyPath = (path) => path == null ||\n (typeof path === 'object' && !!path[Symbol.iterator]().next().done);\nclass Collection extends Node.NodeBase {\n constructor(type, schema) {\n super(type);\n Object.defineProperty(this, 'schema', {\n value: schema,\n configurable: true,\n enumerable: false,\n writable: true\n });\n }\n /**\n * Create a copy of this collection.\n *\n * @param schema - If defined, overwrites the original's schema\n */\n clone(schema) {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (schema)\n copy.schema = schema;\n copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it);\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n /**\n * Adds a value to the collection. For `!!map` and `!!omap` the value must\n * be a Pair instance or a `{ key, value }` object, which may not have a key\n * that already exists in the map.\n */\n addIn(path, value) {\n if (isEmptyPath(path))\n this.add(value);\n else {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.addIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n /**\n * Removes a value from the collection.\n * @returns `true` if the item was found and removed.\n */\n deleteIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.delete(key);\n const node = this.get(key, true);\n if (Node.isCollection(node))\n return node.deleteIn(rest);\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n /**\n * Returns item at `key`, or `undefined` if not found. By default unwraps\n * scalar values from their surrounding node; to disable set `keepScalar` to\n * `true` (collections are always returned intact).\n */\n getIn(path, keepScalar) {\n const [key, ...rest] = path;\n const node = this.get(key, true);\n if (rest.length === 0)\n return !keepScalar && Node.isScalar(node) ? node.value : node;\n else\n return Node.isCollection(node) ? 
node.getIn(rest, keepScalar) : undefined;\n }\n hasAllNullValues(allowScalar) {\n return this.items.every(node => {\n if (!Node.isPair(node))\n return false;\n const n = node.value;\n return (n == null ||\n (allowScalar &&\n Node.isScalar(n) &&\n n.value == null &&\n !n.commentBefore &&\n !n.comment &&\n !n.tag));\n });\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n */\n hasIn(path) {\n const [key, ...rest] = path;\n if (rest.length === 0)\n return this.has(key);\n const node = this.get(key, true);\n return Node.isCollection(node) ? node.hasIn(rest) : false;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n */\n setIn(path, value) {\n const [key, ...rest] = path;\n if (rest.length === 0) {\n this.set(key, value);\n }\n else {\n const node = this.get(key, true);\n if (Node.isCollection(node))\n node.setIn(rest, value);\n else if (node === undefined && this.schema)\n this.set(key, collectionFromPath(this.schema, rest, value));\n else\n throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n }\n }\n}\nCollection.maxFlowStringSingleLineLength = 60;\n\nexports.Collection = Collection;\nexports.collectionFromPath = collectionFromPath;\nexports.isEmptyPath = isEmptyPath;\n","'use strict';\n\nconst ALIAS = Symbol.for('yaml.alias');\nconst DOC = Symbol.for('yaml.document');\nconst MAP = Symbol.for('yaml.map');\nconst PAIR = Symbol.for('yaml.pair');\nconst SCALAR = Symbol.for('yaml.scalar');\nconst SEQ = Symbol.for('yaml.seq');\nconst NODE_TYPE = Symbol.for('yaml.node.type');\nconst isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;\nconst isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;\nconst isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;\nconst isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;\nconst isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;\nconst isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;\nfunction isCollection(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case MAP:\n case SEQ:\n return true;\n }\n return false;\n}\nfunction isNode(node) {\n if (node && typeof node === 'object')\n switch (node[NODE_TYPE]) {\n case ALIAS:\n case MAP:\n case SCALAR:\n case SEQ:\n return true;\n }\n return false;\n}\nconst hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;\nclass NodeBase {\n constructor(type) {\n Object.defineProperty(this, NODE_TYPE, { value: type });\n }\n /** Create a copy of this node. 
*/\n clone() {\n const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n if (this.range)\n copy.range = this.range.slice();\n return copy;\n }\n}\n\nexports.ALIAS = ALIAS;\nexports.DOC = DOC;\nexports.MAP = MAP;\nexports.NODE_TYPE = NODE_TYPE;\nexports.NodeBase = NodeBase;\nexports.PAIR = PAIR;\nexports.SCALAR = SCALAR;\nexports.SEQ = SEQ;\nexports.hasAnchor = hasAnchor;\nexports.isAlias = isAlias;\nexports.isCollection = isCollection;\nexports.isDocument = isDocument;\nexports.isMap = isMap;\nexports.isNode = isNode;\nexports.isPair = isPair;\nexports.isScalar = isScalar;\nexports.isSeq = isSeq;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyPair = require('../stringify/stringifyPair.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Node = require('./Node.js');\n\nfunction createPair(key, value, ctx) {\n const k = createNode.createNode(key, undefined, ctx);\n const v = createNode.createNode(value, undefined, ctx);\n return new Pair(k, v);\n}\nclass Pair {\n constructor(key, value = null) {\n Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR });\n this.key = key;\n this.value = value;\n }\n clone(schema) {\n let { key, value } = this;\n if (Node.isNode(key))\n key = key.clone(schema);\n if (Node.isNode(value))\n value = value.clone(schema);\n return new Pair(key, value);\n }\n toJSON(_, ctx) {\n const pair = ctx?.mapAsMap ? new Map() : {};\n return addPairToJSMap.addPairToJSMap(ctx, pair, this);\n }\n toString(ctx, onComment, onChompKeep) {\n return ctx?.doc\n ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)\n : JSON.stringify(this);\n }\n}\n\nexports.Pair = Pair;\nexports.createPair = createPair;\n","'use strict';\n\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nconst isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');\nclass Scalar extends Node.NodeBase {\n constructor(value) {\n super(Node.SCALAR);\n this.value = value;\n }\n toJSON(arg, ctx) {\n return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx);\n }\n toString() {\n return String(this.value);\n }\n}\nScalar.BLOCK_FOLDED = 'BLOCK_FOLDED';\nScalar.BLOCK_LITERAL = 'BLOCK_LITERAL';\nScalar.PLAIN = 'PLAIN';\nScalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';\nScalar.QUOTE_SINGLE = 'QUOTE_SINGLE';\n\nexports.Scalar = Scalar;\nexports.isScalarValue = isScalarValue;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Pair = require('./Pair.js');\nvar Scalar = require('./Scalar.js');\n\nfunction findPair(items, key) {\n const k = Node.isScalar(key) ? key.value : key;\n for (const it of items) {\n if (Node.isPair(it)) {\n if (it.key === key || it.key === k)\n return it;\n if (Node.isScalar(it.key) && it.key.value === k)\n return it;\n }\n }\n return undefined;\n}\nclass YAMLMap extends Collection.Collection {\n constructor(schema) {\n super(Node.MAP, schema);\n this.items = [];\n }\n static get tagName() {\n return 'tag:yaml.org,2002:map';\n }\n /**\n * Adds a value to the collection.\n *\n * @param overwrite - If not set `true`, using a key that is already in the\n * collection will throw. 
Otherwise, overwrites the previous value.\n */\n add(pair, overwrite) {\n let _pair;\n if (Node.isPair(pair))\n _pair = pair;\n else if (!pair || typeof pair !== 'object' || !('key' in pair)) {\n // In TypeScript, this never happens.\n _pair = new Pair.Pair(pair, pair?.value);\n }\n else\n _pair = new Pair.Pair(pair.key, pair.value);\n const prev = findPair(this.items, _pair.key);\n const sortEntries = this.schema?.sortMapEntries;\n if (prev) {\n if (!overwrite)\n throw new Error(`Key ${_pair.key} already set`);\n // For scalars, keep the old node & its comments and anchors\n if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))\n prev.value.value = _pair.value;\n else\n prev.value = _pair.value;\n }\n else if (sortEntries) {\n const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);\n if (i === -1)\n this.items.push(_pair);\n else\n this.items.splice(i, 0, _pair);\n }\n else {\n this.items.push(_pair);\n }\n }\n delete(key) {\n const it = findPair(this.items, key);\n if (!it)\n return false;\n const del = this.items.splice(this.items.indexOf(it), 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const it = findPair(this.items, key);\n const node = it?.value;\n return (!keepScalar && Node.isScalar(node) ? node.value : node) ?? undefined;\n }\n has(key) {\n return !!findPair(this.items, key);\n }\n set(key, value) {\n this.add(new Pair.Pair(key, value), true);\n }\n /**\n * @param ctx - Conversion context, originally set in Document#toJS()\n * @param {Class} Type - If set, forces the returned collection type\n * @returns Instance of Type, Map, or Object\n */\n toJSON(_, ctx, Type) {\n const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};\n if (ctx?.onCreate)\n ctx.onCreate(map);\n for (const item of this.items)\n addPairToJSMap.addPairToJSMap(ctx, map, item);\n return map;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n for (const item of this.items) {\n if (!Node.isPair(item))\n throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);\n }\n if (!ctx.allNullValues && this.hasAllNullValues(false))\n ctx = Object.assign({}, ctx, { allNullValues: true });\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '',\n flowChars: { start: '{', end: '}' },\n itemIndent: ctx.indent || '',\n onChompKeep,\n onComment\n });\n }\n}\n\nexports.YAMLMap = YAMLMap;\nexports.findPair = findPair;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar Collection = require('./Collection.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nclass YAMLSeq extends Collection.Collection {\n constructor(schema) {\n super(Node.SEQ, schema);\n this.items = [];\n }\n static get tagName() {\n return 'tag:yaml.org,2002:seq';\n }\n add(value) {\n this.items.push(value);\n }\n /**\n * Removes a value from the collection.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n *\n * @returns `true` if the item was found and removed.\n */\n delete(key) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return false;\n const del = this.items.splice(idx, 1);\n return del.length > 0;\n }\n get(key, keepScalar) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n return undefined;\n const it = this.items[idx];\n return !keepScalar && Node.isScalar(it) ? 
it.value : it;\n }\n /**\n * Checks if the collection includes a value with the key `key`.\n *\n * `key` must contain a representation of an integer for this to succeed.\n * It may be wrapped in a `Scalar`.\n */\n has(key) {\n const idx = asItemIndex(key);\n return typeof idx === 'number' && idx < this.items.length;\n }\n /**\n * Sets a value in this collection. For `!!set`, `value` needs to be a\n * boolean to add/remove the item from the set.\n *\n * If `key` does not contain a representation of an integer, this will throw.\n * It may be wrapped in a `Scalar`.\n */\n set(key, value) {\n const idx = asItemIndex(key);\n if (typeof idx !== 'number')\n throw new Error(`Expected a valid index, not ${key}.`);\n const prev = this.items[idx];\n if (Node.isScalar(prev) && Scalar.isScalarValue(value))\n prev.value = value;\n else\n this.items[idx] = value;\n }\n toJSON(_, ctx) {\n const seq = [];\n if (ctx?.onCreate)\n ctx.onCreate(seq);\n let i = 0;\n for (const item of this.items)\n seq.push(toJS.toJS(item, String(i++), ctx));\n return seq;\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n return stringifyCollection.stringifyCollection(this, ctx, {\n blockItemPrefix: '- ',\n flowChars: { start: '[', end: ']' },\n itemIndent: (ctx.indent || '') + ' ',\n onChompKeep,\n onComment\n });\n }\n}\nfunction asItemIndex(key) {\n let idx = Node.isScalar(key) ? key.value : key;\n if (idx && typeof idx === 'string')\n idx = Number(idx);\n return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0\n ? idx\n : null;\n}\n\nexports.YAMLSeq = YAMLSeq;\n","'use strict';\n\nvar log = require('../log.js');\nvar stringify = require('../stringify/stringify.js');\nvar Node = require('./Node.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nconst MERGE_KEY = '<<';\nfunction addPairToJSMap(ctx, map, { key, value }) {\n if (ctx?.doc.schema.merge && isMergeKey(key)) {\n value = Node.isAlias(value) ? value.resolve(ctx.doc) : value;\n if (Node.isSeq(value))\n for (const it of value.items)\n mergeToJSMap(ctx, map, it);\n else if (Array.isArray(value))\n for (const it of value)\n mergeToJSMap(ctx, map, it);\n else\n mergeToJSMap(ctx, map, value);\n }\n else {\n const jsKey = toJS.toJS(key, '', ctx);\n if (map instanceof Map) {\n map.set(jsKey, toJS.toJS(value, jsKey, ctx));\n }\n else if (map instanceof Set) {\n map.add(jsKey);\n }\n else {\n const stringKey = stringifyKey(key, jsKey, ctx);\n const jsValue = toJS.toJS(value, stringKey, ctx);\n if (stringKey in map)\n Object.defineProperty(map, stringKey, {\n value: jsValue,\n writable: true,\n enumerable: true,\n configurable: true\n });\n else\n map[stringKey] = jsValue;\n }\n }\n return map;\n}\nconst isMergeKey = (key) => key === MERGE_KEY ||\n (Node.isScalar(key) &&\n key.value === MERGE_KEY &&\n (!key.type || key.type === Scalar.Scalar.PLAIN));\n// If the value associated with a merge key is a single mapping node, each of\n// its key/value pairs is inserted into the current mapping, unless the key\n// already exists in it. If the value associated with the merge key is a\n// sequence, then this sequence is expected to contain mapping nodes and each\n// of these nodes is merged in turn according to its order in the sequence.\n// Keys in mapping nodes earlier in the sequence override keys specified in\n// later mapping nodes. -- http://yaml.org/type/merge.html\nfunction mergeToJSMap(ctx, map, value) {\n const source = ctx && Node.isAlias(value) ? 
value.resolve(ctx.doc) : value;\n if (!Node.isMap(source))\n throw new Error('Merge sources must be maps or map aliases');\n const srcMap = source.toJSON(null, ctx, Map);\n for (const [key, value] of srcMap) {\n if (map instanceof Map) {\n if (!map.has(key))\n map.set(key, value);\n }\n else if (map instanceof Set) {\n map.add(key);\n }\n else if (!Object.prototype.hasOwnProperty.call(map, key)) {\n Object.defineProperty(map, key, {\n value,\n writable: true,\n enumerable: true,\n configurable: true\n });\n }\n }\n return map;\n}\nfunction stringifyKey(key, jsKey, ctx) {\n if (jsKey === null)\n return '';\n if (typeof jsKey !== 'object')\n return String(jsKey);\n if (Node.isNode(key) && ctx && ctx.doc) {\n const strCtx = stringify.createStringifyContext(ctx.doc, {});\n strCtx.anchors = new Set();\n for (const node of ctx.anchors.keys())\n strCtx.anchors.add(node.anchor);\n strCtx.inFlow = true;\n strCtx.inStringifyKey = true;\n const strKey = key.toString(strCtx);\n if (!ctx.mapKeyWarned) {\n let jsonStr = JSON.stringify(strKey);\n if (jsonStr.length > 40)\n jsonStr = jsonStr.substring(0, 36) + '...\"';\n log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);\n ctx.mapKeyWarned = true;\n }\n return strKey;\n }\n return JSON.stringify(jsKey);\n}\n\nexports.addPairToJSMap = addPairToJSMap;\n","'use strict';\n\nvar Node = require('./Node.js');\n\n/**\n * Recursively convert any node or its contents to native JavaScript\n *\n * @param value - The input value\n * @param arg - If `value` defines a `toJSON()` method, use this\n * as its first argument\n * @param ctx - Conversion context, originally set in Document#toJS(). If\n * `{ keep: true }` is not set, output should be suitable for JSON\n * stringification.\n */\nfunction toJS(value, arg, ctx) {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n if (Array.isArray(value))\n return value.map((v, i) => toJS(v, String(i), ctx));\n if (value && typeof value.toJSON === 'function') {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n if (!ctx || !Node.hasAnchor(value))\n return value.toJSON(arg, ctx);\n const data = { aliasCount: 0, count: 1, res: undefined };\n ctx.anchors.set(value, data);\n ctx.onCreate = res => {\n data.res = res;\n delete ctx.onCreate;\n };\n const res = value.toJSON(arg, ctx);\n if (ctx.onCreate)\n ctx.onCreate(res);\n return res;\n }\n if (typeof value === 'bigint' && !ctx?.keep)\n return Number(value);\n return value;\n}\n\nexports.toJS = toJS;\n","'use strict';\n\nvar resolveBlockScalar = require('../compose/resolve-block-scalar.js');\nvar resolveFlowScalar = require('../compose/resolve-flow-scalar.js');\nvar errors = require('../errors.js');\nvar stringifyString = require('../stringify/stringifyString.js');\n\nfunction resolveAsScalar(token, strict = true, onError) {\n if (token) {\n const _onError = (pos, code, message) => {\n const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? 
pos[0] : pos.offset;\n if (onError)\n onError(offset, code, message);\n else\n throw new errors.YAMLParseError([offset, offset + 1], code, message);\n };\n switch (token.type) {\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);\n case 'block-scalar':\n return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);\n }\n }\n return null;\n}\n/**\n * Create a new scalar token with `value`\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.indent The indent level of the token.\n * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.\n * @param context.offset The offset position of the token.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction createScalarToken(value, context) {\n const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey,\n indent: indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n const end = context.end ?? [\n { type: 'newline', offset: -1, indent, source: '\\n' }\n ];\n switch (source[0]) {\n case '|':\n case '>': {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, end))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n return { type: 'block-scalar', offset, indent, props, source: body };\n }\n case '\"':\n return { type: 'double-quoted-scalar', offset, indent, source, end };\n case \"'\":\n return { type: 'single-quoted-scalar', offset, indent, source, end };\n default:\n return { type: 'scalar', offset, indent, source, end };\n }\n}\n/**\n * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.\n *\n * Best efforts are made to retain any comments previously associated with the `token`,\n * though all contents within a collection's `items` will be overwritten.\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param token Any token. 
If it does not include an `indent` value, the value will be stringified as if it were an implicit key.\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.afterKey In most cases, values after a key should have an additional level of indentation.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction setScalarValue(token, value, context = {}) {\n let { afterKey = false, implicitKey = false, inFlow = false, type } = context;\n let indent = 'indent' in token ? token.indent : null;\n if (afterKey && typeof indent === 'number')\n indent += 2;\n if (!type)\n switch (token.type) {\n case 'single-quoted-scalar':\n type = 'QUOTE_SINGLE';\n break;\n case 'double-quoted-scalar':\n type = 'QUOTE_DOUBLE';\n break;\n case 'block-scalar': {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';\n break;\n }\n default:\n type = 'PLAIN';\n }\n const source = stringifyString.stringifyString({ type, value }, {\n implicitKey: implicitKey || indent === null,\n indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',\n inFlow,\n options: { blockQuote: true, lineWidth: -1 }\n });\n switch (source[0]) {\n case '|':\n case '>':\n setBlockScalarValue(token, source);\n break;\n case '\"':\n setFlowScalarValue(token, source, 'double-quoted-scalar');\n break;\n case \"'\":\n setFlowScalarValue(token, source, 'single-quoted-scalar');\n break;\n default:\n setFlowScalarValue(token, source, 'scalar');\n }\n}\nfunction setBlockScalarValue(token, source) {\n const he = source.indexOf('\\n');\n const head = source.substring(0, he);\n const body = source.substring(he + 1) + '\\n';\n if (token.type === 'block-scalar') {\n const header = token.props[0];\n if (header.type !== 'block-scalar-header')\n throw new Error('Invalid block scalar header');\n header.source = head;\n token.source = body;\n }\n else {\n const { offset } = token;\n const indent = 'indent' in token ? token.indent : -1;\n const props = [\n { type: 'block-scalar-header', offset, indent, source: head }\n ];\n if (!addEndtoBlockProps(props, 'end' in token ? 
token.end : undefined))\n props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type: 'block-scalar', indent, props, source: body });\n }\n}\n/** @returns `true` if last token is a newline */\nfunction addEndtoBlockProps(props, end) {\n if (end)\n for (const st of end)\n switch (st.type) {\n case 'space':\n case 'comment':\n props.push(st);\n break;\n case 'newline':\n props.push(st);\n return true;\n }\n return false;\n}\nfunction setFlowScalarValue(token, source, type) {\n switch (token.type) {\n case 'scalar':\n case 'double-quoted-scalar':\n case 'single-quoted-scalar':\n token.type = type;\n token.source = source;\n break;\n case 'block-scalar': {\n const end = token.props.slice(1);\n let oa = source.length;\n if (token.props[0].type === 'block-scalar-header')\n oa -= token.props[0].source.length;\n for (const tok of end)\n tok.offset += oa;\n delete token.props;\n Object.assign(token, { type, source, end });\n break;\n }\n case 'block-map':\n case 'block-seq': {\n const offset = token.offset + source.length;\n const nl = { type: 'newline', offset, indent: token.indent, source: '\\n' };\n delete token.items;\n Object.assign(token, { type, source, end: [nl] });\n break;\n }\n default: {\n const indent = 'indent' in token ? token.indent : -1;\n const end = 'end' in token && Array.isArray(token.end)\n ? token.end.filter(st => st.type === 'space' ||\n st.type === 'comment' ||\n st.type === 'newline')\n : [];\n for (const key of Object.keys(token))\n if (key !== 'type' && key !== 'offset')\n delete token[key];\n Object.assign(token, { type, indent, source, end });\n }\n }\n}\n\nexports.createScalarToken = createScalarToken;\nexports.resolveAsScalar = resolveAsScalar;\nexports.setScalarValue = setScalarValue;\n","'use strict';\n\n/**\n * Stringify a CST document, token, or collection item\n *\n * Fair warning: This applies no validation whatsoever, and\n * simply concatenates the sources in their logical order.\n */\nconst stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst);\nfunction stringifyToken(token) {\n switch (token.type) {\n case 'block-scalar': {\n let res = '';\n for (const tok of token.props)\n res += stringifyToken(tok);\n return res + token.source;\n }\n case 'block-map':\n case 'block-seq': {\n let res = '';\n for (const item of token.items)\n res += stringifyItem(item);\n return res;\n }\n case 'flow-collection': {\n let res = token.start.source;\n for (const item of token.items)\n res += stringifyItem(item);\n for (const st of token.end)\n res += st.source;\n return res;\n }\n case 'document': {\n let res = stringifyItem(token);\n if (token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n default: {\n let res = token.source;\n if ('end' in token && token.end)\n for (const st of token.end)\n res += st.source;\n return res;\n }\n }\n}\nfunction stringifyItem({ start, key, sep, value }) {\n let res = '';\n for (const st of start)\n res += st.source;\n if (key)\n res += stringifyToken(key);\n if (sep)\n for (const st of sep)\n res += st.source;\n if (value)\n res += stringifyToken(value);\n return res;\n}\n\nexports.stringify = stringify;\n","'use strict';\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove item');\n/**\n * Apply a visitor to a CST document or item.\n *\n * Walks through the tree (depth-first) starting from the root, calling a\n * `visitor` function with two arguments when entering each item:\n * - `item`: The current item, which included the following members:\n * - `start: SourceToken[]` – Source tokens before the key or value,\n * possibly including its anchor or tag.\n * - `key?: Token | null` – Set for pair values. May then be `null`, if\n * the key before the `:` separator is empty.\n * - `sep?: SourceToken[]` – Source tokens between the key and the value,\n * which should include the `:` map value indicator if `value` is set.\n * - `value?: Token` – The value of a sequence item, or of a map pair.\n * - `path`: The steps from the root to the current node, as an array of\n * `['key' | 'value', number]` tuples.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this token, continue with\n * next sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current item, then continue with the next one\n * - `number`: Set the index of the next step. This is useful especially if\n * the index of the current token has changed.\n * - `function`: Define the next visitor for this item. 
After the original\n * visitor is called on item entry, next visitors are called after handling\n * a non-empty `key` and when exiting the item.\n */\nfunction visit(cst, visitor) {\n if ('type' in cst && cst.type === 'document')\n cst = { start: cst.start, value: cst.value };\n _visit(Object.freeze([]), cst, visitor);\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current item */\nvisit.SKIP = SKIP;\n/** Remove the current item */\nvisit.REMOVE = REMOVE;\n/** Find the item at `path` from `cst` as the root */\nvisit.itemAtPath = (cst, path) => {\n let item = cst;\n for (const [field, index] of path) {\n const tok = item?.[field];\n if (tok && 'items' in tok) {\n item = tok.items[index];\n }\n else\n return undefined;\n }\n return item;\n};\n/**\n * Get the immediate parent collection of the item at `path` from `cst` as the root.\n *\n * Throws an error if the collection is not found, which should never happen if the item itself exists.\n */\nvisit.parentCollection = (cst, path) => {\n const parent = visit.itemAtPath(cst, path.slice(0, -1));\n const field = path[path.length - 1][0];\n const coll = parent?.[field];\n if (coll && 'items' in coll)\n return coll;\n throw new Error('Parent collection not found');\n};\nfunction _visit(path, item, visitor) {\n let ctrl = visitor(item, path);\n if (typeof ctrl === 'symbol')\n return ctrl;\n for (const field of ['key', 'value']) {\n const token = item[field];\n if (token && 'items' in token) {\n for (let i = 0; i < token.items.length; ++i) {\n const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n token.items.splice(i, 1);\n i -= 1;\n }\n }\n if (typeof ctrl === 'function' && field === 'key')\n ctrl = ctrl(item, path);\n }\n }\n return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;\n}\n\nexports.visit = visit;\n","'use strict';\n\nvar cstScalar = require('./cst-scalar.js');\nvar cstStringify = require('./cst-stringify.js');\nvar cstVisit = require('./cst-visit.js');\n\n/** The byte order mark */\nconst BOM = '\\u{FEFF}';\n/** Start of doc-mode */\nconst DOCUMENT = '\\x02'; // C0: Start of Text\n/** Unexpected end of flow-mode */\nconst FLOW_END = '\\x18'; // C0: Cancel\n/** Next token is a scalar value */\nconst SCALAR = '\\x1f'; // C0: Unit Separator\n/** @returns `true` if `token` is a flow or block collection */\nconst isCollection = (token) => !!token && 'items' in token;\n/** @returns `true` if `token` is a flow or block scalar; not an alias */\nconst isScalar = (token) => !!token &&\n (token.type === 'scalar' ||\n token.type === 'single-quoted-scalar' ||\n token.type === 'double-quoted-scalar' ||\n token.type === 'block-scalar');\n/* istanbul ignore next */\n/** Get a printable representation of a lexer token */\nfunction prettyToken(token) {\n switch (token) {\n case BOM:\n return '';\n case DOCUMENT:\n return '';\n case FLOW_END:\n return '';\n case SCALAR:\n return '';\n default:\n return JSON.stringify(token);\n }\n}\n/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/\nfunction tokenType(source) {\n switch (source) {\n case BOM:\n return 'byte-order-mark';\n case DOCUMENT:\n return 'doc-mode';\n case FLOW_END:\n return 'flow-error-end';\n case SCALAR:\n return 'scalar';\n case '---':\n return 'doc-start';\n case '...':\n return 'doc-end';\n case '':\n case '\\n':\n case '\\r\\n':\n return 'newline';\n case '-':\n return 'seq-item-ind';\n case '?':\n return 'explicit-key-ind';\n case ':':\n return 'map-value-ind';\n case '{':\n return 'flow-map-start';\n case '}':\n return 'flow-map-end';\n case '[':\n return 'flow-seq-start';\n case ']':\n return 'flow-seq-end';\n case ',':\n return 'comma';\n }\n switch (source[0]) {\n case ' ':\n case '\\t':\n return 'space';\n case '#':\n return 'comment';\n case '%':\n return 'directive-line';\n case '*':\n return 'alias';\n case '&':\n return 'anchor';\n case '!':\n return 'tag';\n case \"'\":\n return 'single-quoted-scalar';\n case '\"':\n return 'double-quoted-scalar';\n case '|':\n case '>':\n return 'block-scalar-header';\n }\n return null;\n}\n\nexports.createScalarToken = cstScalar.createScalarToken;\nexports.resolveAsScalar = cstScalar.resolveAsScalar;\nexports.setScalarValue = cstScalar.setScalarValue;\nexports.stringify = cstStringify.stringify;\nexports.visit = cstVisit.visit;\nexports.BOM = BOM;\nexports.DOCUMENT = DOCUMENT;\nexports.FLOW_END = FLOW_END;\nexports.SCALAR = SCALAR;\nexports.isCollection = isCollection;\nexports.isScalar = isScalar;\nexports.prettyToken = prettyToken;\nexports.tokenType = tokenType;\n","'use strict';\n\nvar cst = require('./cst.js');\n\n/*\nSTART -> stream\n\nstream\n directive -> line-end -> stream\n indent + line-end -> stream\n [else] -> line-start\n\nline-end\n comment -> line-end\n newline -> .\n input-end -> END\n\nline-start\n doc-start -> doc\n doc-end -> stream\n [else] -> indent -> block-start\n\nblock-start\n seq-item-start -> block-start\n explicit-key-start -> block-start\n map-value-start -> block-start\n [else] -> doc\n\ndoc\n line-end -> line-start\n spaces -> doc\n anchor -> doc\n tag -> doc\n flow-start -> flow -> doc\n flow-end -> error -> doc\n seq-item-start -> error -> doc\n explicit-key-start -> error -> doc\n map-value-start -> doc\n alias -> doc\n quote-start -> quoted-scalar -> doc\n block-scalar-header -> line-end -> block-scalar(min) -> line-start\n [else] -> plain-scalar(false, min) -> doc\n\nflow\n line-end -> flow\n spaces -> flow\n anchor -> flow\n tag -> flow\n flow-start -> flow -> flow\n flow-end -> .\n seq-item-start -> error -> flow\n explicit-key-start -> flow\n map-value-start -> flow\n alias -> flow\n quote-start -> quoted-scalar -> flow\n comma -> flow\n [else] -> plain-scalar(true, 0) -> flow\n\nquoted-scalar\n quote-end -> .\n [else] -> quoted-scalar\n\nblock-scalar(min)\n newline + peek(indent < min) -> .\n [else] -> block-scalar(min)\n\nplain-scalar(is-flow, min)\n scalar-end(is-flow) -> .\n peek(newline + (indent < min)) -> .\n [else] -> plain-scalar(min)\n*/\nfunction isEmpty(ch) {\n switch (ch) {\n case undefined:\n case ' ':\n case '\\n':\n case '\\r':\n case '\\t':\n return true;\n default:\n return false;\n }\n}\nconst hexDigits = '0123456789ABCDEFabcdef'.split('');\nconst tagChars = \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()\".split('');\nconst invalidFlowScalarChars = ',[]{}'.split('');\nconst invalidAnchorChars = ' ,[]{}\\n\\r\\t'.split('');\nconst isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);\n/**\n * Splits an input string into lexical tokens, i.e. 
smaller strings that are\n * easily identifiable by `tokens.tokenType()`.\n *\n * Lexing starts always in a \"stream\" context. Incomplete input may be buffered\n * until a complete token can be emitted.\n *\n * In addition to slices of the original input, the following control characters\n * may also be emitted:\n *\n * - `\\x02` (Start of Text): A document starts with the next token\n * - `\\x18` (Cancel): Unexpected end of flow-mode (indicates an error)\n * - `\\x1f` (Unit Separator): Next token is a scalar value\n * - `\\u{FEFF}` (Byte order mark): Emitted separately outside documents\n */\nclass Lexer {\n constructor() {\n /**\n * Flag indicating whether the end of the current buffer marks the end of\n * all input\n */\n this.atEnd = false;\n /**\n * Explicit indent set in block scalar header, as an offset from the current\n * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not\n * explicitly set.\n */\n this.blockScalarIndent = -1;\n /**\n * Block scalars that include a + (keep) chomping indicator in their header\n * include trailing empty lines, which are otherwise excluded from the\n * scalar's contents.\n */\n this.blockScalarKeep = false;\n /** Current input */\n this.buffer = '';\n /**\n * Flag noting whether the map value indicator : can immediately follow this\n * node within a flow context.\n */\n this.flowKey = false;\n /** Count of surrounding flow collection levels. */\n this.flowLevel = 0;\n /**\n * Minimum level of indentation required for next lines to be parsed as a\n * part of the current scalar value.\n */\n this.indentNext = 0;\n /** Indentation level of the current line. */\n this.indentValue = 0;\n /** Position of the next \\n character. */\n this.lineEndPos = null;\n /** Stores the state of the lexer if reaching the end of incpomplete input */\n this.next = null;\n /** A pointer to `buffer`; the current position of the lexer. */\n this.pos = 0;\n }\n /**\n * Generate YAML tokens from the `source` string. If `incomplete`,\n * a part of the last line may be left as a buffer for the next call.\n *\n * @returns A generator of lexical tokens\n */\n *lex(source, incomplete = false) {\n if (source) {\n this.buffer = this.buffer ? this.buffer + source : source;\n this.lineEndPos = null;\n }\n this.atEnd = !incomplete;\n let next = this.next ?? 'stream';\n while (next && (incomplete || this.hasChars(1)))\n next = yield* this.parseNext(next);\n }\n atLineEnd() {\n let i = this.pos;\n let ch = this.buffer[i];\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[++i];\n if (!ch || ch === '#' || ch === '\\n')\n return true;\n if (ch === '\\r')\n return this.buffer[i + 1] === '\\n';\n return false;\n }\n charAt(n) {\n return this.buffer[this.pos + n];\n }\n continueScalar(offset) {\n let ch = this.buffer[offset];\n if (this.indentNext > 0) {\n let indent = 0;\n while (ch === ' ')\n ch = this.buffer[++indent + offset];\n if (ch === '\\r') {\n const next = this.buffer[indent + offset + 1];\n if (next === '\\n' || (!next && !this.atEnd))\n return offset + indent + 1;\n }\n return ch === '\\n' || indent >= this.indentNext || (!ch && !this.atEnd)\n ? 
offset + indent\n : -1;\n }\n if (ch === '-' || ch === '.') {\n const dt = this.buffer.substr(offset, 3);\n if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))\n return -1;\n }\n return offset;\n }\n getLine() {\n let end = this.lineEndPos;\n if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {\n end = this.buffer.indexOf('\\n', this.pos);\n this.lineEndPos = end;\n }\n if (end === -1)\n return this.atEnd ? this.buffer.substring(this.pos) : null;\n if (this.buffer[end - 1] === '\\r')\n end -= 1;\n return this.buffer.substring(this.pos, end);\n }\n hasChars(n) {\n return this.pos + n <= this.buffer.length;\n }\n setNext(state) {\n this.buffer = this.buffer.substring(this.pos);\n this.pos = 0;\n this.lineEndPos = null;\n this.next = state;\n return null;\n }\n peek(n) {\n return this.buffer.substr(this.pos, n);\n }\n *parseNext(next) {\n switch (next) {\n case 'stream':\n return yield* this.parseStream();\n case 'line-start':\n return yield* this.parseLineStart();\n case 'block-start':\n return yield* this.parseBlockStart();\n case 'doc':\n return yield* this.parseDocument();\n case 'flow':\n return yield* this.parseFlowCollection();\n case 'quoted-scalar':\n return yield* this.parseQuotedScalar();\n case 'block-scalar':\n return yield* this.parseBlockScalar();\n case 'plain-scalar':\n return yield* this.parsePlainScalar();\n }\n }\n *parseStream() {\n let line = this.getLine();\n if (line === null)\n return this.setNext('stream');\n if (line[0] === cst.BOM) {\n yield* this.pushCount(1);\n line = line.substring(1);\n }\n if (line[0] === '%') {\n let dirEnd = line.length;\n const cs = line.indexOf('#');\n if (cs !== -1) {\n const ch = line[cs - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd = cs - 1;\n }\n while (true) {\n const ch = line[dirEnd - 1];\n if (ch === ' ' || ch === '\\t')\n dirEnd -= 1;\n else\n break;\n }\n const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));\n yield* this.pushCount(line.length - n); // possible comment\n this.pushNewline();\n return 'stream';\n }\n if (this.atLineEnd()) {\n const sp = yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - sp);\n yield* this.pushNewline();\n return 'stream';\n }\n yield cst.DOCUMENT;\n return yield* this.parseLineStart();\n }\n *parseLineStart() {\n const ch = this.charAt(0);\n if (!ch && !this.atEnd)\n return this.setNext('line-start');\n if (ch === '-' || ch === '.') {\n if (!this.atEnd && !this.hasChars(4))\n return this.setNext('line-start');\n const s = this.peek(3);\n if (s === '---' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n this.indentValue = 0;\n this.indentNext = 0;\n return 'doc';\n }\n else if (s === '...' && isEmpty(this.charAt(3))) {\n yield* this.pushCount(3);\n return 'stream';\n }\n }\n this.indentValue = yield* this.pushSpaces(false);\n if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))\n this.indentNext = this.indentValue;\n return yield* this.parseBlockStart();\n }\n *parseBlockStart() {\n const [ch0, ch1] = this.peek(2);\n if (!ch1 && !this.atEnd)\n return this.setNext('block-start');\n if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) {\n const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));\n this.indentNext = this.indentValue + 1;\n this.indentValue += n;\n return yield* this.parseBlockStart();\n }\n return 'doc';\n }\n *parseDocument() {\n yield* this.pushSpaces(true);\n const line = this.getLine();\n if (line === null)\n return this.setNext('doc');\n let n = yield* this.pushIndicators();\n switch (line[n]) {\n case '#':\n yield* this.pushCount(line.length - n);\n // fallthrough\n case undefined:\n yield* this.pushNewline();\n return yield* this.parseLineStart();\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel = 1;\n return 'flow';\n case '}':\n case ']':\n // this is an error\n yield* this.pushCount(1);\n return 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'doc';\n case '\"':\n case \"'\":\n return yield* this.parseQuotedScalar();\n case '|':\n case '>':\n n += yield* this.parseBlockScalarHeader();\n n += yield* this.pushSpaces(true);\n yield* this.pushCount(line.length - n);\n yield* this.pushNewline();\n return yield* this.parseBlockScalar();\n default:\n return yield* this.parsePlainScalar();\n }\n }\n *parseFlowCollection() {\n let nl, sp;\n let indent = -1;\n do {\n nl = yield* this.pushNewline();\n if (nl > 0) {\n sp = yield* this.pushSpaces(false);\n this.indentValue = indent = sp;\n }\n else {\n sp = 0;\n }\n sp += yield* this.pushSpaces(true);\n } while (nl + sp > 0);\n const line = this.getLine();\n if (line === null)\n return this.setNext('flow');\n if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||\n (indent === 0 &&\n (line.startsWith('---') || line.startsWith('...')) &&\n isEmpty(line[3]))) {\n // Allowing for the terminal ] or } at the same (rather than greater)\n // indent level as the initial [ or { is technically invalid, but\n // failing here would be surprising to users.\n const atFlowEndMarker = indent === this.indentNext - 1 &&\n this.flowLevel === 1 &&\n (line[0] === ']' || line[0] === '}');\n if (!atFlowEndMarker) {\n // this is an error\n this.flowLevel = 0;\n yield cst.FLOW_END;\n return yield* this.parseLineStart();\n }\n }\n let n = 0;\n while (line[n] === ',') {\n n += yield* this.pushCount(1);\n n += yield* this.pushSpaces(true);\n this.flowKey = false;\n }\n n += yield* this.pushIndicators();\n switch (line[n]) {\n case undefined:\n return 'flow';\n case '#':\n yield* this.pushCount(line.length - n);\n return 'flow';\n case '{':\n case '[':\n yield* this.pushCount(1);\n this.flowKey = false;\n this.flowLevel += 1;\n return 'flow';\n case '}':\n case ']':\n yield* this.pushCount(1);\n this.flowKey = true;\n this.flowLevel -= 1;\n return this.flowLevel ? 
'flow' : 'doc';\n case '*':\n yield* this.pushUntil(isNotAnchorChar);\n return 'flow';\n case '\"':\n case \"'\":\n this.flowKey = true;\n return yield* this.parseQuotedScalar();\n case ':': {\n const next = this.charAt(1);\n if (this.flowKey || isEmpty(next) || next === ',') {\n this.flowKey = false;\n yield* this.pushCount(1);\n yield* this.pushSpaces(true);\n return 'flow';\n }\n }\n // fallthrough\n default:\n this.flowKey = false;\n return yield* this.parsePlainScalar();\n }\n }\n *parseQuotedScalar() {\n const quote = this.charAt(0);\n let end = this.buffer.indexOf(quote, this.pos + 1);\n if (quote === \"'\") {\n while (end !== -1 && this.buffer[end + 1] === \"'\")\n end = this.buffer.indexOf(\"'\", end + 2);\n }\n else {\n // double-quote\n while (end !== -1) {\n let n = 0;\n while (this.buffer[end - 1 - n] === '\\\\')\n n += 1;\n if (n % 2 === 0)\n break;\n end = this.buffer.indexOf('\"', end + 1);\n }\n }\n // Only looking for newlines within the quotes\n const qb = this.buffer.substring(0, end);\n let nl = qb.indexOf('\\n', this.pos);\n if (nl !== -1) {\n while (nl !== -1) {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = qb.indexOf('\\n', cs);\n }\n if (nl !== -1) {\n // this is an error caused by an unexpected unindent\n end = nl - (qb[nl - 1] === '\\r' ? 2 : 1);\n }\n }\n if (end === -1) {\n if (!this.atEnd)\n return this.setNext('quoted-scalar');\n end = this.buffer.length;\n }\n yield* this.pushToIndex(end + 1, false);\n return this.flowLevel ? 'flow' : 'doc';\n }\n *parseBlockScalarHeader() {\n this.blockScalarIndent = -1;\n this.blockScalarKeep = false;\n let i = this.pos;\n while (true) {\n const ch = this.buffer[++i];\n if (ch === '+')\n this.blockScalarKeep = true;\n else if (ch > '0' && ch <= '9')\n this.blockScalarIndent = Number(ch) - 1;\n else if (ch !== '-')\n break;\n }\n return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');\n }\n *parseBlockScalar() {\n let nl = this.pos - 1; // may be -1 if this.pos === 0\n let indent = 0;\n let ch;\n loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {\n switch (ch) {\n case ' ':\n indent += 1;\n break;\n case '\\n':\n nl = i;\n indent = 0;\n break;\n case '\\r': {\n const next = this.buffer[i + 1];\n if (!next && !this.atEnd)\n return this.setNext('block-scalar');\n if (next === '\\n')\n break;\n } // fallthrough\n default:\n break loop;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('block-scalar');\n if (indent >= this.indentNext) {\n if (this.blockScalarIndent === -1)\n this.indentNext = indent;\n else\n this.indentNext += this.blockScalarIndent;\n do {\n const cs = this.continueScalar(nl + 1);\n if (cs === -1)\n break;\n nl = this.buffer.indexOf('\\n', cs);\n } while (nl !== -1);\n if (nl === -1) {\n if (!this.atEnd)\n return this.setNext('block-scalar');\n nl = this.buffer.length;\n }\n }\n if (!this.blockScalarKeep) {\n do {\n let i = nl - 1;\n let ch = this.buffer[i];\n if (ch === '\\r')\n ch = this.buffer[--i];\n const lastChar = i; // Drop the line if last char not more indented\n while (ch === ' ' || ch === '\\t')\n ch = this.buffer[--i];\n if (ch === '\\n' && i >= this.pos && i + 1 + indent > lastChar)\n nl = i;\n else\n break;\n } while (true);\n }\n yield cst.SCALAR;\n yield* this.pushToIndex(nl + 1, true);\n return yield* this.parseLineStart();\n }\n *parsePlainScalar() {\n const inFlow = this.flowLevel > 0;\n let end = this.pos - 1;\n let i = this.pos - 1;\n let ch;\n while ((ch = this.buffer[++i])) {\n if (ch === ':') {\n const next = this.buffer[i + 1];\n if 
(isEmpty(next) || (inFlow && next === ','))\n break;\n end = i;\n }\n else if (isEmpty(ch)) {\n let next = this.buffer[i + 1];\n if (ch === '\\r') {\n if (next === '\\n') {\n i += 1;\n ch = '\\n';\n next = this.buffer[i + 1];\n }\n else\n end = i;\n }\n if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))\n break;\n if (ch === '\\n') {\n const cs = this.continueScalar(i + 1);\n if (cs === -1)\n break;\n i = Math.max(i, cs - 2); // to advance, but still account for ' #'\n }\n }\n else {\n if (inFlow && invalidFlowScalarChars.includes(ch))\n break;\n end = i;\n }\n }\n if (!ch && !this.atEnd)\n return this.setNext('plain-scalar');\n yield cst.SCALAR;\n yield* this.pushToIndex(end + 1, true);\n return inFlow ? 'flow' : 'doc';\n }\n *pushCount(n) {\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos += n;\n return n;\n }\n return 0;\n }\n *pushToIndex(i, allowEmpty) {\n const s = this.buffer.slice(this.pos, i);\n if (s) {\n yield s;\n this.pos += s.length;\n return s.length;\n }\n else if (allowEmpty)\n yield '';\n return 0;\n }\n *pushIndicators() {\n switch (this.charAt(0)) {\n case '!':\n return ((yield* this.pushTag()) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '&':\n return ((yield* this.pushUntil(isNotAnchorChar)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n case '-': // this is an error\n case '?': // this is an error outside flow collections\n case ':': {\n const inFlow = this.flowLevel > 0;\n const ch1 = this.charAt(1);\n if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {\n if (!inFlow)\n this.indentNext = this.indentValue + 1;\n else if (this.flowKey)\n this.flowKey = false;\n return ((yield* this.pushCount(1)) +\n (yield* this.pushSpaces(true)) +\n (yield* this.pushIndicators()));\n }\n }\n }\n return 0;\n }\n *pushTag() {\n if (this.charAt(1) === '<') {\n let i = this.pos + 2;\n let ch = this.buffer[i];\n while (!isEmpty(ch) && ch !== '>')\n ch = this.buffer[++i];\n return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);\n }\n else {\n let i = this.pos + 1;\n let ch = this.buffer[i];\n while (ch) {\n if (tagChars.includes(ch))\n ch = this.buffer[++i];\n else if (ch === '%' &&\n hexDigits.includes(this.buffer[i + 1]) &&\n hexDigits.includes(this.buffer[i + 2])) {\n ch = this.buffer[(i += 3)];\n }\n else\n break;\n }\n return yield* this.pushToIndex(i, false);\n }\n }\n *pushNewline() {\n const ch = this.buffer[this.pos];\n if (ch === '\\n')\n return yield* this.pushCount(1);\n else if (ch === '\\r' && this.charAt(1) === '\\n')\n return yield* this.pushCount(2);\n else\n return 0;\n }\n *pushSpaces(allowTabs) {\n let i = this.pos - 1;\n let ch;\n do {\n ch = this.buffer[++i];\n } while (ch === ' ' || (allowTabs && ch === '\\t'));\n const n = i - this.pos;\n if (n > 0) {\n yield this.buffer.substr(this.pos, n);\n this.pos = i;\n }\n return n;\n }\n *pushUntil(test) {\n let i = this.pos;\n let ch = this.buffer[i];\n while (!test(ch))\n ch = this.buffer[++i];\n return yield* this.pushToIndex(i, false);\n }\n}\n\nexports.Lexer = Lexer;\n","'use strict';\n\n/**\n * Tracks newlines during parsing in order to provide an efficient API for\n * determining the one-indexed `{ line, col }` position for any offset\n * within the input.\n */\nclass LineCounter {\n constructor() {\n this.lineStarts = [];\n /**\n * Should be called in ascending order. 
Otherwise, call\n * `lineCounter.lineStarts.sort()` before calling `linePos()`.\n */\n this.addNewLine = (offset) => this.lineStarts.push(offset);\n /**\n * Performs a binary search and returns the 1-indexed { line, col }\n * position of `offset`. If `line === 0`, `addNewLine` has never been\n * called or `offset` is before the first known newline.\n */\n this.linePos = (offset) => {\n let low = 0;\n let high = this.lineStarts.length;\n while (low < high) {\n const mid = (low + high) >> 1; // Math.floor((low + high) / 2)\n if (this.lineStarts[mid] < offset)\n low = mid + 1;\n else\n high = mid;\n }\n if (this.lineStarts[low] === offset)\n return { line: low + 1, col: 1 };\n if (low === 0)\n return { line: 0, col: offset };\n const start = this.lineStarts[low - 1];\n return { line: low, col: offset - start + 1 };\n };\n }\n}\n\nexports.LineCounter = LineCounter;\n","'use strict';\n\nvar cst = require('./cst.js');\nvar lexer = require('./lexer.js');\n\nfunction includesToken(list, type) {\n for (let i = 0; i < list.length; ++i)\n if (list[i].type === type)\n return true;\n return false;\n}\nfunction findNonEmptyIndex(list) {\n for (let i = 0; i < list.length; ++i) {\n switch (list[i].type) {\n case 'space':\n case 'comment':\n case 'newline':\n break;\n default:\n return i;\n }\n }\n return -1;\n}\nfunction isFlowToken(token) {\n switch (token?.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n case 'flow-collection':\n return true;\n default:\n return false;\n }\n}\nfunction getPrevProps(parent) {\n switch (parent.type) {\n case 'document':\n return parent.start;\n case 'block-map': {\n const it = parent.items[parent.items.length - 1];\n return it.sep ?? it.start;\n }\n case 'block-seq':\n return parent.items[parent.items.length - 1].start;\n /* istanbul ignore next should not happen */\n default:\n return [];\n }\n}\n/** Note: May modify input array */\nfunction getFirstKeyStartProps(prev) {\n if (prev.length === 0)\n return [];\n let i = prev.length;\n loop: while (--i >= 0) {\n switch (prev[i].type) {\n case 'doc-start':\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n case 'newline':\n break loop;\n }\n }\n while (prev[++i]?.type === 'space') {\n /* loop */\n }\n return prev.splice(i, prev.length);\n}\nfunction fixFlowSeqItems(fc) {\n if (fc.start.type === 'flow-seq-start') {\n for (const it of fc.items) {\n if (it.sep &&\n !it.value &&\n !includesToken(it.start, 'explicit-key-ind') &&\n !includesToken(it.sep, 'map-value-ind')) {\n if (it.key)\n it.value = it.key;\n delete it.key;\n if (isFlowToken(it.value)) {\n if (it.value.end)\n Array.prototype.push.apply(it.value.end, it.sep);\n else\n it.value.end = it.sep;\n }\n else\n Array.prototype.push.apply(it.start, it.sep);\n delete it.sep;\n }\n }\n }\n}\n/**\n * A YAML concrete syntax tree (CST) parser\n *\n * ```ts\n * const src: string = ...\n * for (const token of new Parser().parse(src)) {\n * // token: Token\n * }\n * ```\n *\n * To use the parser with a user-provided lexer:\n *\n * ```ts\n * function* parse(source: string, lexer: Lexer) {\n * const parser = new Parser()\n * for (const lexeme of lexer.lex(source))\n * yield* parser.next(lexeme)\n * yield* parser.end()\n * }\n *\n * const src: string = ...\n * const lexer = new Lexer()\n * for (const token of parse(src, lexer)) {\n * // token: Token\n * }\n * ```\n */\nclass Parser {\n /**\n * @param onNewLine - If defined, called separately with the start position of\n * each new line (in `parse()`, 
including the start of input).\n */\n constructor(onNewLine) {\n /** If true, space and sequence indicators count as indentation */\n this.atNewLine = true;\n /** If true, next token is a scalar value */\n this.atScalar = false;\n /** Current indentation level */\n this.indent = 0;\n /** Current offset since the start of parsing */\n this.offset = 0;\n /** On the same line with a block map key */\n this.onKeyLine = false;\n /** Top indicates the node that's currently being built */\n this.stack = [];\n /** The source of the current token, set in parse() */\n this.source = '';\n /** The type of the current token, set in parse() */\n this.type = '';\n // Must be defined after `next()`\n this.lexer = new lexer.Lexer();\n this.onNewLine = onNewLine;\n }\n /**\n * Parse `source` as a YAML stream.\n * If `incomplete`, a part of the last line may be left as a buffer for the next call.\n *\n * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.\n *\n * @returns A generator of tokens representing each directive, document, and other structure.\n */\n *parse(source, incomplete = false) {\n if (this.onNewLine && this.offset === 0)\n this.onNewLine(0);\n for (const lexeme of this.lexer.lex(source, incomplete))\n yield* this.next(lexeme);\n if (!incomplete)\n yield* this.end();\n }\n /**\n * Advance the parser by the `source` of one lexical token.\n */\n *next(source) {\n this.source = source;\n if (process.env.LOG_TOKENS)\n console.log('|', cst.prettyToken(source));\n if (this.atScalar) {\n this.atScalar = false;\n yield* this.step();\n this.offset += source.length;\n return;\n }\n const type = cst.tokenType(source);\n if (!type) {\n const message = `Not a YAML token: ${source}`;\n yield* this.pop({ type: 'error', offset: this.offset, message, source });\n this.offset += source.length;\n }\n else if (type === 'scalar') {\n this.atNewLine = false;\n this.atScalar = true;\n this.type = 'scalar';\n }\n else {\n this.type = type;\n yield* this.step();\n switch (type) {\n case 'newline':\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine)\n this.onNewLine(this.offset + source.length);\n break;\n case 'space':\n if (this.atNewLine && source[0] === ' ')\n this.indent += source.length;\n break;\n case 'explicit-key-ind':\n case 'map-value-ind':\n case 'seq-item-ind':\n if (this.atNewLine)\n this.indent += source.length;\n break;\n case 'doc-mode':\n case 'flow-error-end':\n return;\n default:\n this.atNewLine = false;\n }\n this.offset += source.length;\n }\n }\n /** Call at end of input to push out any remaining constructions */\n *end() {\n while (this.stack.length > 0)\n yield* this.pop();\n }\n get sourceToken() {\n const st = {\n type: this.type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n return st;\n }\n *step() {\n const top = this.peek(1);\n if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {\n while (this.stack.length > 0)\n yield* this.pop();\n this.stack.push({\n type: 'doc-end',\n offset: this.offset,\n source: this.source\n });\n return;\n }\n if (!top)\n return yield* this.stream();\n switch (top.type) {\n case 'document':\n return yield* this.document(top);\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return yield* this.scalar(top);\n case 'block-scalar':\n return yield* this.blockScalar(top);\n case 'block-map':\n return yield* this.blockMap(top);\n case 'block-seq':\n return yield* this.blockSequence(top);\n case 'flow-collection':\n return yield* 
this.flowCollection(top);\n case 'doc-end':\n return yield* this.documentEnd(top);\n }\n /* istanbul ignore next should not happen */\n yield* this.pop();\n }\n peek(n) {\n return this.stack[this.stack.length - n];\n }\n *pop(error) {\n const token = error ?? this.stack.pop();\n /* istanbul ignore if should not happen */\n if (!token) {\n const message = 'Tried to pop an empty stack';\n yield { type: 'error', offset: this.offset, source: '', message };\n }\n else if (this.stack.length === 0) {\n yield token;\n }\n else {\n const top = this.peek(1);\n if (token.type === 'block-scalar') {\n // Block scalars use their parent rather than header indent\n token.indent = 'indent' in top ? top.indent : 0;\n }\n else if (token.type === 'flow-collection' && top.type === 'document') {\n // Ignore all indent for top-level flow collections\n token.indent = 0;\n }\n if (token.type === 'flow-collection')\n fixFlowSeqItems(token);\n switch (top.type) {\n case 'document':\n top.value = token;\n break;\n case 'block-scalar':\n top.props.push(token); // error\n break;\n case 'block-map': {\n const it = top.items[top.items.length - 1];\n if (it.value) {\n top.items.push({ start: [], key: token, sep: [] });\n this.onKeyLine = true;\n return;\n }\n else if (it.sep) {\n it.value = token;\n }\n else {\n Object.assign(it, { key: token, sep: [] });\n this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');\n return;\n }\n break;\n }\n case 'block-seq': {\n const it = top.items[top.items.length - 1];\n if (it.value)\n top.items.push({ start: [], value: token });\n else\n it.value = token;\n break;\n }\n case 'flow-collection': {\n const it = top.items[top.items.length - 1];\n if (!it || it.value)\n top.items.push({ start: [], key: token, sep: [] });\n else if (it.sep)\n it.value = token;\n else\n Object.assign(it, { key: token, sep: [] });\n return;\n }\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.pop(token);\n }\n if ((top.type === 'document' ||\n top.type === 'block-map' ||\n top.type === 'block-seq') &&\n (token.type === 'block-map' || token.type === 'block-seq')) {\n const last = token.items[token.items.length - 1];\n if (last &&\n !last.sep &&\n !last.value &&\n last.start.length > 0 &&\n findNonEmptyIndex(last.start) === -1 &&\n (token.indent === 0 ||\n last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {\n if (top.type === 'document')\n top.end = last.start;\n else\n top.items.push({ start: last.start });\n token.items.splice(-1, 1);\n }\n }\n }\n }\n *stream() {\n switch (this.type) {\n case 'directive-line':\n yield { type: 'directive', offset: this.offset, source: this.source };\n return;\n case 'byte-order-mark':\n case 'space':\n case 'comment':\n case 'newline':\n yield this.sourceToken;\n return;\n case 'doc-mode':\n case 'doc-start': {\n const doc = {\n type: 'document',\n offset: this.offset,\n start: []\n };\n if (this.type === 'doc-start')\n doc.start.push(this.sourceToken);\n this.stack.push(doc);\n return;\n }\n }\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML stream`,\n source: this.source\n };\n }\n *document(doc) {\n if (doc.value)\n return yield* this.lineEnd(doc);\n switch (this.type) {\n case 'doc-start': {\n if (findNonEmptyIndex(doc.start) !== -1) {\n yield* this.pop();\n yield* this.step();\n }\n else\n doc.start.push(this.sourceToken);\n return;\n }\n case 'anchor':\n case 'tag':\n case 'space':\n case 'comment':\n case 'newline':\n 
doc.start.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(doc);\n if (bv)\n this.stack.push(bv);\n else {\n yield {\n type: 'error',\n offset: this.offset,\n message: `Unexpected ${this.type} token in YAML document`,\n source: this.source\n };\n }\n }\n *scalar(scalar) {\n if (this.type === 'map-value-ind') {\n const prev = getPrevProps(this.peek(2));\n const start = getFirstKeyStartProps(prev);\n let sep;\n if (scalar.end) {\n sep = scalar.end;\n sep.push(this.sourceToken);\n delete scalar.end;\n }\n else\n sep = [this.sourceToken];\n const map = {\n type: 'block-map',\n offset: scalar.offset,\n indent: scalar.indent,\n items: [{ start, key: scalar, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else\n yield* this.lineEnd(scalar);\n }\n *blockScalar(scalar) {\n switch (this.type) {\n case 'space':\n case 'comment':\n case 'newline':\n scalar.props.push(this.sourceToken);\n return;\n case 'scalar':\n scalar.source = this.source;\n // block-scalar source includes trailing newline\n this.atNewLine = true;\n this.indent = 0;\n if (this.onNewLine) {\n let nl = this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n yield* this.pop();\n break;\n /* istanbul ignore next should not happen */\n default:\n yield* this.pop();\n yield* this.step();\n }\n }\n *blockMap(map) {\n const it = map.items[map.items.length - 1];\n // it.sep is true-ish if pair already has key or : separator\n switch (this.type) {\n case 'newline':\n this.onKeyLine = false;\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'space':\n case 'comment':\n if (it.value) {\n map.items.push({ start: [this.sourceToken] });\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n if (this.atIndentedComment(it.start, map.indent)) {\n const prev = map.items[map.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n map.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n }\n if (this.indent >= map.indent) {\n const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;\n // For empty nodes, assign newline-separated not indented empty tokens to following node\n let start = [];\n if (atNextItem && it.sep && !it.value) {\n const nl = [];\n for (let i = 0; i < it.sep.length; ++i) {\n const st = it.sep[i];\n switch (st.type) {\n case 'newline':\n nl.push(i);\n break;\n case 'space':\n break;\n case 'comment':\n if (st.indent > map.indent)\n nl.length = 0;\n break;\n default:\n nl.length = 0;\n }\n }\n if (nl.length >= 2)\n start = it.sep.splice(nl[1]);\n }\n switch (this.type) {\n case 'anchor':\n case 'tag':\n if (atNextItem || it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n it.sep.push(this.sourceToken);\n }\n else {\n it.start.push(this.sourceToken);\n }\n return;\n case 'explicit-key-ind':\n if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {\n it.start.push(this.sourceToken);\n }\n else if (atNextItem || 
it.value) {\n start.push(this.sourceToken);\n map.items.push({ start });\n }\n else {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n });\n }\n this.onKeyLine = true;\n return;\n case 'map-value-ind':\n if (includesToken(it.start, 'explicit-key-ind')) {\n if (!it.sep) {\n if (includesToken(it.start, 'newline')) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else {\n const start = getFirstKeyStartProps(it.start);\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n }\n else if (it.value) {\n map.items.push({ start: [], key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n });\n }\n else if (isFlowToken(it.key) &&\n !includesToken(it.sep, 'newline')) {\n const start = getFirstKeyStartProps(it.start);\n const key = it.key;\n const sep = it.sep;\n sep.push(this.sourceToken);\n // @ts-expect-error type guard is wrong here\n delete it.key, delete it.sep;\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key, sep }]\n });\n }\n else if (start.length > 0) {\n // Not actually at next item\n it.sep = it.sep.concat(start, this.sourceToken);\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n else {\n if (!it.sep) {\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n }\n else if (it.value || atNextItem) {\n map.items.push({ start, key: null, sep: [this.sourceToken] });\n }\n else if (includesToken(it.sep, 'map-value-ind')) {\n this.stack.push({\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [], key: null, sep: [this.sourceToken] }]\n });\n }\n else {\n it.sep.push(this.sourceToken);\n }\n }\n this.onKeyLine = true;\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (atNextItem || it.value) {\n map.items.push({ start, key: fs, sep: [] });\n this.onKeyLine = true;\n }\n else if (it.sep) {\n this.stack.push(fs);\n }\n else {\n Object.assign(it, { key: fs, sep: [] });\n this.onKeyLine = true;\n }\n return;\n }\n default: {\n const bv = this.startBlockValue(map);\n if (bv) {\n if (atNextItem &&\n bv.type !== 'block-seq' &&\n includesToken(it.start, 'explicit-key-ind')) {\n map.items.push({ start });\n }\n this.stack.push(bv);\n return;\n }\n }\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *blockSequence(seq) {\n const it = seq.items[seq.items.length - 1];\n switch (this.type) {\n case 'newline':\n if (it.value) {\n const end = 'end' in it.value ? it.value.end : undefined;\n const last = Array.isArray(end) ? 
end[end.length - 1] : undefined;\n if (last?.type === 'comment')\n end?.push(this.sourceToken);\n else\n seq.items.push({ start: [this.sourceToken] });\n }\n else\n it.start.push(this.sourceToken);\n return;\n case 'space':\n case 'comment':\n if (it.value)\n seq.items.push({ start: [this.sourceToken] });\n else {\n if (this.atIndentedComment(it.start, seq.indent)) {\n const prev = seq.items[seq.items.length - 2];\n const end = prev?.value?.end;\n if (Array.isArray(end)) {\n Array.prototype.push.apply(end, it.start);\n end.push(this.sourceToken);\n seq.items.pop();\n return;\n }\n }\n it.start.push(this.sourceToken);\n }\n return;\n case 'anchor':\n case 'tag':\n if (it.value || this.indent <= seq.indent)\n break;\n it.start.push(this.sourceToken);\n return;\n case 'seq-item-ind':\n if (this.indent !== seq.indent)\n break;\n if (it.value || includesToken(it.start, 'seq-item-ind'))\n seq.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n }\n if (this.indent > seq.indent) {\n const bv = this.startBlockValue(seq);\n if (bv) {\n this.stack.push(bv);\n return;\n }\n }\n yield* this.pop();\n yield* this.step();\n }\n *flowCollection(fc) {\n const it = fc.items[fc.items.length - 1];\n if (this.type === 'flow-error-end') {\n let top;\n do {\n yield* this.pop();\n top = this.peek(1);\n } while (top && top.type === 'flow-collection');\n }\n else if (fc.end.length === 0) {\n switch (this.type) {\n case 'comma':\n case 'explicit-key-ind':\n if (!it || it.sep)\n fc.items.push({ start: [this.sourceToken] });\n else\n it.start.push(this.sourceToken);\n return;\n case 'map-value-ind':\n if (!it || it.value)\n fc.items.push({ start: [], key: null, sep: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n Object.assign(it, { key: null, sep: [this.sourceToken] });\n return;\n case 'space':\n case 'comment':\n case 'newline':\n case 'anchor':\n case 'tag':\n if (!it || it.value)\n fc.items.push({ start: [this.sourceToken] });\n else if (it.sep)\n it.sep.push(this.sourceToken);\n else\n it.start.push(this.sourceToken);\n return;\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar': {\n const fs = this.flowScalar(this.type);\n if (!it || it.value)\n fc.items.push({ start: [], key: fs, sep: [] });\n else if (it.sep)\n this.stack.push(fs);\n else\n Object.assign(it, { key: fs, sep: [] });\n return;\n }\n case 'flow-map-end':\n case 'flow-seq-end':\n fc.end.push(this.sourceToken);\n return;\n }\n const bv = this.startBlockValue(fc);\n /* istanbul ignore else should not happen */\n if (bv)\n this.stack.push(bv);\n else {\n yield* this.pop();\n yield* this.step();\n }\n }\n else {\n const parent = this.peek(2);\n if (parent.type === 'block-map' &&\n ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||\n (this.type === 'newline' &&\n !parent.items[parent.items.length - 1].sep))) {\n yield* this.pop();\n yield* this.step();\n }\n else if (this.type === 'map-value-ind' &&\n parent.type !== 'flow-collection') {\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n fixFlowSeqItems(fc);\n const sep = fc.end.splice(1, fc.end.length);\n sep.push(this.sourceToken);\n const map = {\n type: 'block-map',\n offset: fc.offset,\n indent: fc.indent,\n items: [{ start, key: fc, sep }]\n };\n this.onKeyLine = true;\n this.stack[this.stack.length - 1] = map;\n }\n else {\n yield* this.lineEnd(fc);\n }\n }\n }\n flowScalar(type) {\n if (this.onNewLine) {\n let nl = 
this.source.indexOf('\\n') + 1;\n while (nl !== 0) {\n this.onNewLine(this.offset + nl);\n nl = this.source.indexOf('\\n', nl) + 1;\n }\n }\n return {\n type,\n offset: this.offset,\n indent: this.indent,\n source: this.source\n };\n }\n startBlockValue(parent) {\n switch (this.type) {\n case 'alias':\n case 'scalar':\n case 'single-quoted-scalar':\n case 'double-quoted-scalar':\n return this.flowScalar(this.type);\n case 'block-scalar-header':\n return {\n type: 'block-scalar',\n offset: this.offset,\n indent: this.indent,\n props: [this.sourceToken],\n source: ''\n };\n case 'flow-map-start':\n case 'flow-seq-start':\n return {\n type: 'flow-collection',\n offset: this.offset,\n indent: this.indent,\n start: this.sourceToken,\n items: [],\n end: []\n };\n case 'seq-item-ind':\n return {\n type: 'block-seq',\n offset: this.offset,\n indent: this.indent,\n items: [{ start: [this.sourceToken] }]\n };\n case 'explicit-key-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n start.push(this.sourceToken);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start }]\n };\n }\n case 'map-value-ind': {\n this.onKeyLine = true;\n const prev = getPrevProps(parent);\n const start = getFirstKeyStartProps(prev);\n return {\n type: 'block-map',\n offset: this.offset,\n indent: this.indent,\n items: [{ start, key: null, sep: [this.sourceToken] }]\n };\n }\n }\n return null;\n }\n atIndentedComment(start, indent) {\n if (this.type !== 'comment')\n return false;\n if (this.indent <= indent)\n return false;\n return start.every(st => st.type === 'newline' || st.type === 'space');\n }\n *documentEnd(docEnd) {\n if (this.type !== 'doc-mode') {\n if (docEnd.end)\n docEnd.end.push(this.sourceToken);\n else\n docEnd.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n *lineEnd(token) {\n switch (this.type) {\n case 'comma':\n case 'doc-start':\n case 'doc-end':\n case 'flow-seq-end':\n case 'flow-map-end':\n case 'map-value-ind':\n yield* this.pop();\n yield* this.step();\n break;\n case 'newline':\n this.onKeyLine = false;\n // fallthrough\n case 'space':\n case 'comment':\n default:\n // all other values are errors\n if (token.end)\n token.end.push(this.sourceToken);\n else\n token.end = [this.sourceToken];\n if (this.type === 'newline')\n yield* this.pop();\n }\n }\n}\n\nexports.Parser = Parser;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar errors = require('./errors.js');\nvar log = require('./log.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\n\nfunction parseOptions(options) {\n const prettyErrors = options.prettyErrors !== false;\n const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;\n return { lineCounter: lineCounter$1, prettyErrors };\n}\n/**\n * Parse the input as a stream of YAML documents.\n *\n * Documents should be separated from each other by `...` or `---` marker lines.\n *\n * @returns If an empty `docs` array is returned, it will be of type\n * EmptyStream and contain additional stream information. 
In\n * TypeScript, you should use `'empty' in docs` as a type guard for it.\n */\nfunction parseAllDocuments(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n const docs = Array.from(composer$1.compose(parser$1.parse(source)));\n if (prettyErrors && lineCounter)\n for (const doc of docs) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n if (docs.length > 0)\n return docs;\n return Object.assign([], { empty: true }, composer$1.streamInfo());\n}\n/** Parse an input string into a single YAML.Document */\nfunction parseDocument(source, options = {}) {\n const { lineCounter, prettyErrors } = parseOptions(options);\n const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n const composer$1 = new composer.Composer(options);\n // `doc` is always set by compose.end(true) at the very latest\n let doc = null;\n for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {\n if (!doc)\n doc = _doc;\n else if (doc.options.logLevel !== 'silent') {\n doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));\n break;\n }\n }\n if (prettyErrors && lineCounter) {\n doc.errors.forEach(errors.prettifyError(source, lineCounter));\n doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n }\n return doc;\n}\nfunction parse(src, reviver, options) {\n let _reviver = undefined;\n if (typeof reviver === 'function') {\n _reviver = reviver;\n }\n else if (options === undefined && reviver && typeof reviver === 'object') {\n options = reviver;\n }\n const doc = parseDocument(src, options);\n if (!doc)\n return null;\n doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));\n if (doc.errors.length > 0) {\n if (doc.options.logLevel !== 'silent')\n throw doc.errors[0];\n else\n doc.errors = [];\n }\n return doc.toJS(Object.assign({ reviver: _reviver }, options));\n}\nfunction stringify(value, replacer, options) {\n let _replacer = null;\n if (typeof replacer === 'function' || Array.isArray(replacer)) {\n _replacer = replacer;\n }\n else if (options === undefined && replacer) {\n options = replacer;\n }\n if (typeof options === 'string')\n options = options.length;\n if (typeof options === 'number') {\n const indent = Math.round(options);\n options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };\n }\n if (value === undefined) {\n const { keepUndefined } = options ?? replacer ?? {};\n if (!keepUndefined)\n return undefined;\n }\n return new Document.Document(value, _replacer, options).toString(options);\n}\n\nexports.parse = parse;\nexports.parseAllDocuments = parseAllDocuments;\nexports.parseDocument = parseDocument;\nexports.stringify = stringify;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar map = require('./common/map.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar tags = require('./tags.js');\n\nconst sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;\nclass Schema {\n constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {\n this.compat = Array.isArray(compat)\n ? tags.getTags(compat, 'compat')\n : compat\n ? 
tags.getTags(null, compat)\n : null;\n this.merge = !!merge;\n this.name = (typeof schema === 'string' && schema) || 'core';\n this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};\n this.tags = tags.getTags(customTags, this.name);\n this.toStringOptions = toStringDefaults ?? null;\n Object.defineProperty(this, Node.MAP, { value: map.map });\n Object.defineProperty(this, Node.SCALAR, { value: string.string });\n Object.defineProperty(this, Node.SEQ, { value: seq.seq });\n // Used by createMap()\n this.sortMapEntries =\n typeof sortMapEntries === 'function'\n ? sortMapEntries\n : sortMapEntries === true\n ? sortMapEntriesByKey\n : null;\n }\n clone() {\n const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));\n copy.tags = this.tags.slice();\n return copy;\n }\n}\n\nexports.Schema = Schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nfunction createMap(schema, obj, ctx) {\n const { keepUndefined, replacer } = ctx;\n const map = new YAMLMap.YAMLMap(schema);\n const add = (key, value) => {\n if (typeof replacer === 'function')\n value = replacer.call(obj, key, value);\n else if (Array.isArray(replacer) && !replacer.includes(key))\n return;\n if (value !== undefined || keepUndefined)\n map.items.push(Pair.createPair(key, value, ctx));\n };\n if (obj instanceof Map) {\n for (const [key, value] of obj)\n add(key, value);\n }\n else if (obj && typeof obj === 'object') {\n for (const key of Object.keys(obj))\n add(key, obj[key]);\n }\n if (typeof schema.sortMapEntries === 'function') {\n map.items.sort(schema.sortMapEntries);\n }\n return map;\n}\nconst map = {\n collection: 'map',\n createNode: createMap,\n default: true,\n nodeClass: YAMLMap.YAMLMap,\n tag: 'tag:yaml.org,2002:map',\n resolve(map, onError) {\n if (!Node.isMap(map))\n onError('Expected a mapping for this tag');\n return map;\n }\n};\n\nexports.map = map;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst nullTag = {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^(?:~|[Nn]ull|NULL)?$/,\n resolve: () => new Scalar.Scalar(null),\n stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)\n ? source\n : ctx.options.nullStr\n};\n\nexports.nullTag = nullTag;\n","'use strict';\n\nvar createNode = require('../../doc/createNode.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction createSeq(schema, obj, ctx) {\n const { replacer } = ctx;\n const seq = new YAMLSeq.YAMLSeq(schema);\n if (obj && Symbol.iterator in Object(obj)) {\n let i = 0;\n for (let it of obj) {\n if (typeof replacer === 'function') {\n const key = obj instanceof Set ? 
it : String(i++);\n it = replacer.call(obj, key, it);\n }\n seq.items.push(createNode.createNode(it, undefined, ctx));\n }\n }\n return seq;\n}\nconst seq = {\n collection: 'seq',\n createNode: createSeq,\n default: true,\n nodeClass: YAMLSeq.YAMLSeq,\n tag: 'tag:yaml.org,2002:seq',\n resolve(seq, onError) {\n if (!Node.isSeq(seq))\n onError('Expected a sequence for this tag');\n return seq;\n }\n};\n\nexports.seq = seq;\n","'use strict';\n\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst string = {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify(item, ctx, onComment, onChompKeep) {\n ctx = Object.assign({ actualString: true }, ctx);\n return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);\n }\n};\n\nexports.string = string;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst boolTag = {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,\n resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),\n stringify({ source, value }, ctx) {\n if (source && boolTag.test.test(source)) {\n const sv = source[0] === 't' || source[0] === 'T';\n if (value === sv)\n return source;\n }\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n }\n};\n\nexports.boolTag = boolTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^(?:[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN))$/,\n resolve: str => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+(?:\\.[0-9]*)?)[eE][-+]?[0-9]+$/,\n resolve: str => parseFloat(str),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:\\.[0-9]+|[0-9]+\\.[0-9]*)$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str));\n const dot = str.indexOf('.');\n if (dot !== -1 && str[str.length - 1] === '0')\n node.minFractionDigits = str.length - dot - 1;\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nconst intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
BigInt(str) : parseInt(str.substring(offset), radix));\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value) && value >= 0)\n return prefix + value.toString(radix);\n return stringifyNumber.stringifyNumber(node);\n}\nconst intOct = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^0o[0-7]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),\n stringify: node => intStringify(node, 8, '0o')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: value => intIdentify(value) && value >= 0,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^0x[0-9a-fA-F]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.boolTag,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar map = require('../common/map.js');\nvar seq = require('../common/seq.js');\n\nfunction intIdentify(value) {\n return typeof value === 'bigint' || Number.isInteger(value);\n}\nconst stringifyJSON = ({ value }) => JSON.stringify(value);\nconst jsonScalars = [\n {\n identify: value => typeof value === 'string',\n default: true,\n tag: 'tag:yaml.org,2002:str',\n resolve: str => str,\n stringify: stringifyJSON\n },\n {\n identify: value => value == null,\n createNode: () => new Scalar.Scalar(null),\n default: true,\n tag: 'tag:yaml.org,2002:null',\n test: /^null$/,\n resolve: () => null,\n stringify: stringifyJSON\n },\n {\n identify: value => typeof value === 'boolean',\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^true|false$/,\n resolve: str => str === 'true',\n stringify: stringifyJSON\n },\n {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^-?(?:0|[1-9][0-9]*)$/,\n resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),\n stringify: ({ value }) => intIdentify(value) ? 
value.toString() : JSON.stringify(value)\n },\n {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,\n resolve: str => parseFloat(str),\n stringify: stringifyJSON\n }\n];\nconst jsonError = {\n default: true,\n tag: '',\n test: /^/,\n resolve(str, onError) {\n onError(`Unresolved plain scalar ${JSON.stringify(str)}`);\n return str;\n }\n};\nconst schema = [map.map, seq.seq].concat(jsonScalars, jsonError);\n\nexports.schema = schema;\n","'use strict';\n\nvar map = require('./common/map.js');\nvar _null = require('./common/null.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar bool = require('./core/bool.js');\nvar float = require('./core/float.js');\nvar int = require('./core/int.js');\nvar schema = require('./core/schema.js');\nvar schema$1 = require('./json/schema.js');\nvar binary = require('./yaml-1.1/binary.js');\nvar omap = require('./yaml-1.1/omap.js');\nvar pairs = require('./yaml-1.1/pairs.js');\nvar schema$2 = require('./yaml-1.1/schema.js');\nvar set = require('./yaml-1.1/set.js');\nvar timestamp = require('./yaml-1.1/timestamp.js');\n\nconst schemas = new Map([\n ['core', schema.schema],\n ['failsafe', [map.map, seq.seq, string.string]],\n ['json', schema$1.schema],\n ['yaml11', schema$2.schema],\n ['yaml-1.1', schema$2.schema]\n]);\nconst tagsByName = {\n binary: binary.binary,\n bool: bool.boolTag,\n float: float.float,\n floatExp: float.floatExp,\n floatNaN: float.floatNaN,\n floatTime: timestamp.floatTime,\n int: int.int,\n intHex: int.intHex,\n intOct: int.intOct,\n intTime: timestamp.intTime,\n map: map.map,\n null: _null.nullTag,\n omap: omap.omap,\n pairs: pairs.pairs,\n seq: seq.seq,\n set: set.set,\n timestamp: timestamp.timestamp\n};\nconst coreKnownTags = {\n 'tag:yaml.org,2002:binary': binary.binary,\n 'tag:yaml.org,2002:omap': omap.omap,\n 'tag:yaml.org,2002:pairs': pairs.pairs,\n 'tag:yaml.org,2002:set': set.set,\n 'tag:yaml.org,2002:timestamp': timestamp.timestamp\n};\nfunction getTags(customTags, schemaName) {\n let tags = schemas.get(schemaName);\n if (!tags) {\n if (Array.isArray(customTags))\n tags = [];\n else {\n const keys = Array.from(schemas.keys())\n .filter(key => key !== 'yaml11')\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown schema \"${schemaName}\"; use one of ${keys} or define customTags array`);\n }\n }\n if (Array.isArray(customTags)) {\n for (const tag of customTags)\n tags = tags.concat(tag);\n }\n else if (typeof customTags === 'function') {\n tags = customTags(tags.slice());\n }\n return tags.map(tag => {\n if (typeof tag !== 'string')\n return tag;\n const tagObj = tagsByName[tag];\n if (tagObj)\n return tagObj;\n const keys = Object.keys(tagsByName)\n .map(key => JSON.stringify(key))\n .join(', ');\n throw new Error(`Unknown custom tag \"${tag}\"; use one of ${keys}`);\n });\n}\n\nexports.coreKnownTags = coreKnownTags;\nexports.getTags = getTags;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst binary = {\n identify: value => value instanceof Uint8Array,\n default: false,\n tag: 'tag:yaml.org,2002:binary',\n /**\n * Returns a Buffer in node and an Uint8Array in browsers\n *\n * To use the resulting buffer as an image, you'll want to do something like:\n *\n * const blob = new Blob([buffer], { type: 'image/jpeg' })\n * document.querySelector('#photo').src = 
URL.createObjectURL(blob)\n */\n resolve(src, onError) {\n if (typeof Buffer === 'function') {\n return Buffer.from(src, 'base64');\n }\n else if (typeof atob === 'function') {\n // On IE 11, atob() can't handle newlines\n const str = atob(src.replace(/[\\n\\r]/g, ''));\n const buffer = new Uint8Array(str.length);\n for (let i = 0; i < str.length; ++i)\n buffer[i] = str.charCodeAt(i);\n return buffer;\n }\n else {\n onError('This environment does not support reading binary tags; either Buffer or atob is required');\n return src;\n }\n },\n stringify({ comment, type, value }, ctx, onComment, onChompKeep) {\n const buf = value; // checked earlier by binary.identify()\n let str;\n if (typeof Buffer === 'function') {\n str =\n buf instanceof Buffer\n ? buf.toString('base64')\n : Buffer.from(buf.buffer).toString('base64');\n }\n else if (typeof btoa === 'function') {\n let s = '';\n for (let i = 0; i < buf.length; ++i)\n s += String.fromCharCode(buf[i]);\n str = btoa(s);\n }\n else {\n throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');\n }\n if (!type)\n type = Scalar.Scalar.BLOCK_LITERAL;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);\n const n = Math.ceil(str.length / lineWidth);\n const lines = new Array(n);\n for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {\n lines[i] = str.substr(o, lineWidth);\n }\n str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\\n' : ' ');\n }\n return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);\n }\n};\n\nexports.binary = binary;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nfunction boolStringify({ value, source }, ctx) {\n const boolObj = value ? trueTag : falseTag;\n if (source && boolObj.test.test(source))\n return source;\n return value ? ctx.options.trueStr : ctx.options.falseStr;\n}\nconst trueTag = {\n identify: value => value === true,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,\n resolve: () => new Scalar.Scalar(true),\n stringify: boolStringify\n};\nconst falseTag = {\n identify: value => value === false,\n default: true,\n tag: 'tag:yaml.org,2002:bool',\n test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,\n resolve: () => new Scalar.Scalar(false),\n stringify: boolStringify\n};\n\nexports.falseTag = falseTag;\nexports.trueTag = trueTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?\\.(?:inf|Inf|INF|nan|NaN|NAN)$/,\n resolve: (str) => str.slice(-3).toLowerCase() === 'nan'\n ? NaN\n : str[0] === '-'\n ? Number.NEGATIVE_INFINITY\n : Number.POSITIVE_INFINITY,\n stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'EXP',\n test: /^[-+]?(?:[0-9][0-9_]*)?(?:\\.[0-9_]*)?[eE][-+]?[0-9]+$/,\n resolve: (str) => parseFloat(str.replace(/_/g, '')),\n stringify(node) {\n const num = Number(node.value);\n return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node);\n }\n};\nconst float = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n test: /^[-+]?(?:[0-9][0-9_]*)?\\.[0-9_]*$/,\n resolve(str) {\n const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));\n const dot = str.indexOf('.');\n if (dot !== -1) {\n const f = str.substring(dot + 1).replace(/_/g, '');\n if (f[f.length - 1] === '0')\n node.minFractionDigits = f.length;\n }\n return node;\n },\n stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nfunction intResolve(str, offset, radix, { intAsBigInt }) {\n const sign = str[0];\n if (sign === '-' || sign === '+')\n offset += 1;\n str = str.substring(offset).replace(/_/g, '');\n if (intAsBigInt) {\n switch (radix) {\n case 2:\n str = `0b${str}`;\n break;\n case 8:\n str = `0o${str}`;\n break;\n case 16:\n str = `0x${str}`;\n break;\n }\n const n = BigInt(str);\n return sign === '-' ? BigInt(-1) * n : n;\n }\n const n = parseInt(str, radix);\n return sign === '-' ? -1 * n : n;\n}\nfunction intStringify(node, radix, prefix) {\n const { value } = node;\n if (intIdentify(value)) {\n const str = value.toString(radix);\n return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;\n }\n return stringifyNumber.stringifyNumber(node);\n}\nconst intBin = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'BIN',\n test: /^[-+]?0b[0-1_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),\n stringify: node => intStringify(node, 2, '0b')\n};\nconst intOct = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'OCT',\n test: /^[-+]?0[0-7_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),\n stringify: node => intStringify(node, 8, '0')\n};\nconst int = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n test: /^[-+]?[0-9][0-9_]*$/,\n resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n identify: intIdentify,\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'HEX',\n test: /^[-+]?0x[0-9a-fA-F_]+$/,\n resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intBin = intBin;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\nvar toJS = require('../../nodes/toJS.js');\nvar Node = require('../../nodes/Node.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\nvar pairs = require('./pairs.js');\n\nclass YAMLOMap extends YAMLSeq.YAMLSeq {\n constructor() {\n super();\n this.add = YAMLMap.YAMLMap.prototype.add.bind(this);\n this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);\n this.get = YAMLMap.YAMLMap.prototype.get.bind(this);\n this.has = YAMLMap.YAMLMap.prototype.has.bind(this);\n this.set = YAMLMap.YAMLMap.prototype.set.bind(this);\n this.tag = YAMLOMap.tag;\n }\n /**\n * If `ctx` is given, the return type is actually `Map`,\n * but TypeScript won't allow widening the signature of a child method.\n */\n toJSON(_, ctx) {\n if (!ctx)\n return super.toJSON(_);\n const map = new Map();\n if (ctx?.onCreate)\n 
ctx.onCreate(map);\n for (const pair of this.items) {\n let key, value;\n if (Node.isPair(pair)) {\n key = toJS.toJS(pair.key, '', ctx);\n value = toJS.toJS(pair.value, key, ctx);\n }\n else {\n key = toJS.toJS(pair, '', ctx);\n }\n if (map.has(key))\n throw new Error('Ordered maps must not include duplicate keys');\n map.set(key, value);\n }\n return map;\n }\n}\nYAMLOMap.tag = 'tag:yaml.org,2002:omap';\nconst omap = {\n collection: 'seq',\n identify: value => value instanceof Map,\n nodeClass: YAMLOMap,\n default: false,\n tag: 'tag:yaml.org,2002:omap',\n resolve(seq, onError) {\n const pairs$1 = pairs.resolvePairs(seq, onError);\n const seenKeys = [];\n for (const { key } of pairs$1.items) {\n if (Node.isScalar(key)) {\n if (seenKeys.includes(key.value)) {\n onError(`Ordered maps must not include duplicate keys: ${key.value}`);\n }\n else {\n seenKeys.push(key.value);\n }\n }\n }\n return Object.assign(new YAMLOMap(), pairs$1);\n },\n createNode(schema, iterable, ctx) {\n const pairs$1 = pairs.createPairs(schema, iterable, ctx);\n const omap = new YAMLOMap();\n omap.items = pairs$1.items;\n return omap;\n }\n};\n\nexports.YAMLOMap = YAMLOMap;\nexports.omap = omap;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar Scalar = require('../../nodes/Scalar.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction resolvePairs(seq, onError) {\n if (Node.isSeq(seq)) {\n for (let i = 0; i < seq.items.length; ++i) {\n let item = seq.items[i];\n if (Node.isPair(item))\n continue;\n else if (Node.isMap(item)) {\n if (item.items.length > 1)\n onError('Each pair must have its own sequence indicator');\n const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));\n if (item.commentBefore)\n pair.key.commentBefore = pair.key.commentBefore\n ? `${item.commentBefore}\\n${pair.key.commentBefore}`\n : item.commentBefore;\n if (item.comment) {\n const cn = pair.value ?? pair.key;\n cn.comment = cn.comment\n ? `${item.comment}\\n${cn.comment}`\n : item.comment;\n }\n item = pair;\n }\n seq.items[i] = Node.isPair(item) ? 
item : new Pair.Pair(item);\n }\n }\n else\n onError('Expected a sequence for this tag');\n return seq;\n}\nfunction createPairs(schema, iterable, ctx) {\n const { replacer } = ctx;\n const pairs = new YAMLSeq.YAMLSeq(schema);\n pairs.tag = 'tag:yaml.org,2002:pairs';\n let i = 0;\n if (iterable && Symbol.iterator in Object(iterable))\n for (let it of iterable) {\n if (typeof replacer === 'function')\n it = replacer.call(iterable, String(i++), it);\n let key, value;\n if (Array.isArray(it)) {\n if (it.length === 2) {\n key = it[0];\n value = it[1];\n }\n else\n throw new TypeError(`Expected [key, value] tuple: ${it}`);\n }\n else if (it && it instanceof Object) {\n const keys = Object.keys(it);\n if (keys.length === 1) {\n key = keys[0];\n value = it[key];\n }\n else\n throw new TypeError(`Expected { key: value } tuple: ${it}`);\n }\n else {\n key = it;\n }\n pairs.items.push(Pair.createPair(key, value, ctx));\n }\n return pairs;\n}\nconst pairs = {\n collection: 'seq',\n default: false,\n tag: 'tag:yaml.org,2002:pairs',\n resolve: resolvePairs,\n createNode: createPairs\n};\n\nexports.createPairs = createPairs;\nexports.pairs = pairs;\nexports.resolvePairs = resolvePairs;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar binary = require('./binary.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\nvar omap = require('./omap.js');\nvar pairs = require('./pairs.js');\nvar set = require('./set.js');\nvar timestamp = require('./timestamp.js');\n\nconst schema = [\n map.map,\n seq.seq,\n string.string,\n _null.nullTag,\n bool.trueTag,\n bool.falseTag,\n int.intBin,\n int.intOct,\n int.int,\n int.intHex,\n float.floatNaN,\n float.floatExp,\n float.float,\n binary.binary,\n omap.omap,\n pairs.pairs,\n set.set,\n timestamp.intTime,\n timestamp.floatTime,\n timestamp.timestamp\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Node = require('../../nodes/Node.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nclass YAMLSet extends YAMLMap.YAMLMap {\n constructor(schema) {\n super(schema);\n this.tag = YAMLSet.tag;\n }\n add(key) {\n let pair;\n if (Node.isPair(key))\n pair = key;\n else if (typeof key === 'object' &&\n 'key' in key &&\n 'value' in key &&\n key.value === null)\n pair = new Pair.Pair(key.key, null);\n else\n pair = new Pair.Pair(key, null);\n const prev = YAMLMap.findPair(this.items, pair.key);\n if (!prev)\n this.items.push(pair);\n }\n /**\n * If `keepPair` is `true`, returns the Pair matching `key`.\n * Otherwise, returns the value of that Pair's key.\n */\n get(key, keepPair) {\n const pair = YAMLMap.findPair(this.items, key);\n return !keepPair && Node.isPair(pair)\n ? Node.isScalar(pair.key)\n ? 
pair.key.value\n : pair.key\n : pair;\n }\n set(key, value) {\n if (typeof value !== 'boolean')\n throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);\n const prev = YAMLMap.findPair(this.items, key);\n if (prev && !value) {\n this.items.splice(this.items.indexOf(prev), 1);\n }\n else if (!prev && value) {\n this.items.push(new Pair.Pair(key));\n }\n }\n toJSON(_, ctx) {\n return super.toJSON(_, ctx, Set);\n }\n toString(ctx, onComment, onChompKeep) {\n if (!ctx)\n return JSON.stringify(this);\n if (this.hasAllNullValues(true))\n return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);\n else\n throw new Error('Set items must all have null values');\n }\n}\nYAMLSet.tag = 'tag:yaml.org,2002:set';\nconst set = {\n collection: 'map',\n identify: value => value instanceof Set,\n nodeClass: YAMLSet,\n default: false,\n tag: 'tag:yaml.org,2002:set',\n resolve(map, onError) {\n if (Node.isMap(map)) {\n if (map.hasAllNullValues(true))\n return Object.assign(new YAMLSet(), map);\n else\n onError('Set items must all have null values');\n }\n else\n onError('Expected a mapping for this tag');\n return map;\n },\n createNode(schema, iterable, ctx) {\n const { replacer } = ctx;\n const set = new YAMLSet(schema);\n if (iterable && Symbol.iterator in Object(iterable))\n for (let value of iterable) {\n if (typeof replacer === 'function')\n value = replacer.call(iterable, value, value);\n set.items.push(Pair.createPair(value, null, ctx));\n }\n return set;\n }\n};\n\nexports.YAMLSet = YAMLSet;\nexports.set = set;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\n/** Internal types handle bigint as number, because TS can't figure it out. */\nfunction parseSexagesimal(str, asBigInt) {\n const sign = str[0];\n const parts = sign === '-' || sign === '+' ? str.substring(1) : str;\n const num = (n) => asBigInt ? BigInt(n) : Number(n);\n const res = parts\n .replace(/_/g, '')\n .split(':')\n .reduce((res, p) => res * num(60) + num(p), num(0));\n return (sign === '-' ? num(-1) * res : res);\n}\n/**\n * hhhh:mm:ss.sss\n *\n * Internal types handle bigint as number, because TS can't figure it out.\n */\nfunction stringifySexagesimal(node) {\n let { value } = node;\n let num = (n) => n;\n if (typeof value === 'bigint')\n num = n => BigInt(n);\n else if (isNaN(value) || !isFinite(value))\n return stringifyNumber.stringifyNumber(node);\n let sign = '';\n if (value < 0) {\n sign = '-';\n value *= num(-1);\n }\n const _60 = num(60);\n const parts = [value % _60]; // seconds, including ms\n if (value < 60) {\n parts.unshift(0); // at least one : is required\n }\n else {\n value = (value - parts[0]) / _60;\n parts.unshift(value % _60); // minutes\n if (value >= 60) {\n value = (value - parts[0]) / _60;\n parts.unshift(value); // hours\n }\n }\n return (sign +\n parts\n .map(n => (n < 10 ? 
'0' + String(n) : String(n)))\n .join(':')\n .replace(/000000\\d*$/, '') // % 60 may introduce error\n );\n}\nconst intTime = {\n identify: value => typeof value === 'bigint' || Number.isInteger(value),\n default: true,\n tag: 'tag:yaml.org,2002:int',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,\n resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),\n stringify: stringifySexagesimal\n};\nconst floatTime = {\n identify: value => typeof value === 'number',\n default: true,\n tag: 'tag:yaml.org,2002:float',\n format: 'TIME',\n test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*$/,\n resolve: str => parseSexagesimal(str, false),\n stringify: stringifySexagesimal\n};\nconst timestamp = {\n identify: value => value instanceof Date,\n default: true,\n tag: 'tag:yaml.org,2002:timestamp',\n // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part\n // may be omitted altogether, resulting in a date format. In such a case, the time part is\n // assumed to be 00:00:00Z (start of day, UTC).\n test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd\n '(?:' + // time is optional\n '(?:t|T|[ \\\\t]+)' + // t | T | whitespace\n '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?\n '(?:[ \\\\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30\n ')?$'),\n resolve(str) {\n const match = str.match(timestamp.test);\n if (!match)\n throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');\n const [, year, month, day, hour, minute, second] = match.map(Number);\n const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;\n let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);\n const tz = match[8];\n if (tz && tz !== 'Z') {\n let d = parseSexagesimal(tz, false);\n if (Math.abs(d) < 30)\n d *= 60;\n date -= 60000 * d;\n }\n return new Date(date);\n },\n stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\\.000Z$/, '')\n};\n\nexports.floatTime = floatTime;\nexports.intTime = intTime;\nexports.timestamp = timestamp;\n","'use strict';\n\nconst FOLD_FLOW = 'flow';\nconst FOLD_BLOCK = 'block';\nconst FOLD_QUOTED = 'quoted';\n/**\n * Tries to keep input at up to `lineWidth` characters, splitting only on spaces\n * not followed by newlines or spaces unless `mode` is `'quoted'`. 
Lines are\n * terminated with `\\n` and started with `indent`.\n */\nfunction foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {\n if (!lineWidth || lineWidth < 0)\n return text;\n const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);\n if (text.length <= endStep)\n return text;\n const folds = [];\n const escapedFolds = {};\n let end = lineWidth - indent.length;\n if (typeof indentAtStart === 'number') {\n if (indentAtStart > lineWidth - Math.max(2, minContentWidth))\n folds.push(0);\n else\n end = lineWidth - indentAtStart;\n }\n let split = undefined;\n let prev = undefined;\n let overflow = false;\n let i = -1;\n let escStart = -1;\n let escEnd = -1;\n if (mode === FOLD_BLOCK) {\n i = consumeMoreIndentedLines(text, i);\n if (i !== -1)\n end = i + endStep;\n }\n for (let ch; (ch = text[(i += 1)]);) {\n if (mode === FOLD_QUOTED && ch === '\\\\') {\n escStart = i;\n switch (text[i + 1]) {\n case 'x':\n i += 3;\n break;\n case 'u':\n i += 5;\n break;\n case 'U':\n i += 9;\n break;\n default:\n i += 1;\n }\n escEnd = i;\n }\n if (ch === '\\n') {\n if (mode === FOLD_BLOCK)\n i = consumeMoreIndentedLines(text, i);\n end = i + endStep;\n split = undefined;\n }\n else {\n if (ch === ' ' &&\n prev &&\n prev !== ' ' &&\n prev !== '\\n' &&\n prev !== '\\t') {\n // space surrounded by non-space can be replaced with newline + indent\n const next = text[i + 1];\n if (next && next !== ' ' && next !== '\\n' && next !== '\\t')\n split = i;\n }\n if (i >= end) {\n if (split) {\n folds.push(split);\n end = split + endStep;\n split = undefined;\n }\n else if (mode === FOLD_QUOTED) {\n // white-space collected at end may stretch past lineWidth\n while (prev === ' ' || prev === '\\t') {\n prev = ch;\n ch = text[(i += 1)];\n overflow = true;\n }\n // Account for newline escape, but don't break preceding escape\n const j = i > escEnd + 1 ? 
i - 2 : escStart - 1;\n // Bail out if lineWidth & minContentWidth are shorter than an escape string\n if (escapedFolds[j])\n return text;\n folds.push(j);\n escapedFolds[j] = true;\n end = j + endStep;\n split = undefined;\n }\n else {\n overflow = true;\n }\n }\n }\n prev = ch;\n }\n if (overflow && onOverflow)\n onOverflow();\n if (folds.length === 0)\n return text;\n if (onFold)\n onFold();\n let res = text.slice(0, folds[0]);\n for (let i = 0; i < folds.length; ++i) {\n const fold = folds[i];\n const end = folds[i + 1] || text.length;\n if (fold === 0)\n res = `\\n${indent}${text.slice(0, end)}`;\n else {\n if (mode === FOLD_QUOTED && escapedFolds[fold])\n res += `${text[fold]}\\\\`;\n res += `\\n${indent}${text.slice(fold + 1, end)}`;\n }\n }\n return res;\n}\n/**\n * Presumes `i + 1` is at the start of a line\n * @returns index of last newline in more-indented block\n */\nfunction consumeMoreIndentedLines(text, i) {\n let ch = text[i + 1];\n while (ch === ' ' || ch === '\\t') {\n do {\n ch = text[(i += 1)];\n } while (ch && ch !== '\\n');\n ch = text[i + 1];\n }\n return i;\n}\n\nexports.FOLD_BLOCK = FOLD_BLOCK;\nexports.FOLD_FLOW = FOLD_FLOW;\nexports.FOLD_QUOTED = FOLD_QUOTED;\nexports.foldFlowLines = foldFlowLines;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar Node = require('../nodes/Node.js');\nvar stringifyComment = require('./stringifyComment.js');\nvar stringifyString = require('./stringifyString.js');\n\nfunction createStringifyContext(doc, options) {\n const opt = Object.assign({\n blockQuote: true,\n commentString: stringifyComment.stringifyComment,\n defaultKeyType: null,\n defaultStringType: 'PLAIN',\n directives: null,\n doubleQuotedAsJSON: false,\n doubleQuotedMinMultiLineLength: 40,\n falseStr: 'false',\n indentSeq: true,\n lineWidth: 80,\n minContentWidth: 20,\n nullStr: 'null',\n simpleKeys: false,\n singleQuote: null,\n trueStr: 'true',\n verifyAliasOrder: true\n }, doc.schema.toStringOptions, options);\n let inFlow;\n switch (opt.collectionStyle) {\n case 'block':\n inFlow = false;\n break;\n case 'flow':\n inFlow = true;\n break;\n default:\n inFlow = null;\n }\n return {\n anchors: new Set(),\n doc,\n indent: '',\n indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',\n inFlow,\n options: opt\n };\n}\nfunction getTagObject(tags, item) {\n if (item.tag) {\n const match = tags.filter(t => t.tag === item.tag);\n if (match.length > 0)\n return match.find(t => t.format === item.format) ?? match[0];\n }\n let tagObj = undefined;\n let obj;\n if (Node.isScalar(item)) {\n obj = item.value;\n const match = tags.filter(t => t.identify?.(obj));\n tagObj =\n match.find(t => t.format === item.format) ?? match.find(t => !t.format);\n }\n else {\n obj = item;\n tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);\n }\n if (!tagObj) {\n const name = obj?.constructor?.name ?? typeof obj;\n throw new Error(`Tag not resolved for ${name} value`);\n }\n return tagObj;\n}\n// needs to be called before value stringifier to allow for circular anchor refs\nfunction stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {\n if (!doc.directives)\n return '';\n const props = [];\n const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor;\n if (anchor && anchors.anchorIsValid(anchor)) {\n anchors$1.add(anchor);\n props.push(`&${anchor}`);\n }\n const tag = node.tag ? node.tag : tagObj.default ? 
null : tagObj.tag;\n if (tag)\n props.push(doc.directives.tagString(tag));\n return props.join(' ');\n}\nfunction stringify(item, ctx, onComment, onChompKeep) {\n if (Node.isPair(item))\n return item.toString(ctx, onComment, onChompKeep);\n if (Node.isAlias(item)) {\n if (ctx.doc.directives)\n return item.toString(ctx);\n if (ctx.resolvedAliases?.has(item)) {\n throw new TypeError(`Cannot stringify circular structure without alias nodes`);\n }\n else {\n if (ctx.resolvedAliases)\n ctx.resolvedAliases.add(item);\n else\n ctx.resolvedAliases = new Set([item]);\n item = item.resolve(ctx.doc);\n }\n }\n let tagObj = undefined;\n const node = Node.isNode(item)\n ? item\n : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });\n if (!tagObj)\n tagObj = getTagObject(ctx.doc.schema.tags, node);\n const props = stringifyProps(node, tagObj, ctx);\n if (props.length > 0)\n ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;\n const str = typeof tagObj.stringify === 'function'\n ? tagObj.stringify(node, ctx, onComment, onChompKeep)\n : Node.isScalar(node)\n ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)\n : node.toString(ctx, onComment, onChompKeep);\n if (!props)\n return str;\n return Node.isScalar(node) || str[0] === '{' || str[0] === '['\n ? `${props} ${str}`\n : `${props}\\n${ctx.indent}${str}`;\n}\n\nexports.createStringifyContext = createStringifyContext;\nexports.stringify = stringify;\n","'use strict';\n\nvar Collection = require('../nodes/Collection.js');\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyCollection(collection, ctx, options) {\n const flow = ctx.inFlow ?? collection.flow;\n const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;\n return stringify(collection, ctx, options);\n}\nfunction stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {\n const { indent, options: { commentString } } = ctx;\n const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });\n let chompKeep = false; // flag for the preceding node's status\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (!chompKeep && item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, chompKeep);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (!chompKeep && ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);\n }\n }\n chompKeep = false;\n let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (chompKeep && comment)\n chompKeep = false;\n lines.push(blockItemPrefix + str);\n }\n let str;\n if (lines.length === 0) {\n str = flowChars.start + flowChars.end;\n }\n else {\n str = lines[0];\n for (let i = 1; i < lines.length; ++i) {\n const line = lines[i];\n str += line ? 
`\\n${indent}${line}` : '\\n';\n }\n }\n if (comment) {\n str += '\\n' + stringifyComment.indentComment(commentString(comment), indent);\n if (onComment)\n onComment();\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n}\nfunction stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {\n const { indent, indentStep, options: { commentString } } = ctx;\n itemIndent += indentStep;\n const itemCtx = Object.assign({}, ctx, {\n indent: itemIndent,\n inFlow: true,\n type: null\n });\n let reqNewline = false;\n let linesAtValue = 0;\n const lines = [];\n for (let i = 0; i < items.length; ++i) {\n const item = items[i];\n let comment = null;\n if (Node.isNode(item)) {\n if (item.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, item.commentBefore, false);\n if (item.comment)\n comment = item.comment;\n }\n else if (Node.isPair(item)) {\n const ik = Node.isNode(item.key) ? item.key : null;\n if (ik) {\n if (ik.spaceBefore)\n lines.push('');\n addCommentBefore(ctx, lines, ik.commentBefore, false);\n if (ik.comment)\n reqNewline = true;\n }\n const iv = Node.isNode(item.value) ? item.value : null;\n if (iv) {\n if (iv.comment)\n comment = iv.comment;\n if (iv.commentBefore)\n reqNewline = true;\n }\n else if (item.value == null && ik && ik.comment) {\n comment = ik.comment;\n }\n }\n if (comment)\n reqNewline = true;\n let str = stringify.stringify(item, itemCtx, () => (comment = null));\n if (i < items.length - 1)\n str += ',';\n if (comment)\n str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n if (!reqNewline && (lines.length > linesAtValue || str.includes('\\n')))\n reqNewline = true;\n lines.push(str);\n linesAtValue = lines.length;\n }\n let str;\n const { start, end } = flowChars;\n if (lines.length === 0) {\n str = start + end;\n }\n else {\n if (!reqNewline) {\n const len = lines.reduce((sum, line) => sum + line.length + 2, 2);\n reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength;\n }\n if (reqNewline) {\n str = start;\n for (const line of lines)\n str += line ? `\\n${indentStep}${indent}${line}` : '\\n';\n str += `\\n${indent}${end}`;\n }\n else {\n str = `${start} ${lines.join(' ')} ${end}`;\n }\n }\n if (comment) {\n str += stringifyComment.lineComment(str, commentString(comment), indent);\n if (onComment)\n onComment();\n }\n return str;\n}\nfunction addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {\n if (comment && chompKeep)\n comment = comment.replace(/^\\n+/, '');\n if (comment) {\n const ic = stringifyComment.indentComment(commentString(comment), indent);\n lines.push(ic.trimStart()); // Avoid double indent on first line\n }\n}\n\nexports.stringifyCollection = stringifyCollection;\n","'use strict';\n\n/**\n * Stringifies a comment.\n *\n * Empty comment lines are left empty,\n * lines consisting of a single space are replaced by `#`,\n * and all other lines are prefixed with a `#`.\n */\nconst stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');\nfunction indentComment(comment, indent) {\n if (/^\\n+$/.test(comment))\n return comment.substring(1);\n return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;\n}\nconst lineComment = (str, indent, comment) => str.endsWith('\\n')\n ? indentComment(comment, indent)\n : comment.includes('\\n')\n ? '\\n' + indentComment(comment, indent)\n : (str.endsWith(' ') ? 
'' : ' ') + comment;\n\nexports.indentComment = indentComment;\nexports.lineComment = lineComment;\nexports.stringifyComment = stringifyComment;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyDocument(doc, options) {\n const lines = [];\n let hasDirectives = options.directives === true;\n if (options.directives !== false && doc.directives) {\n const dir = doc.directives.toString(doc);\n if (dir) {\n lines.push(dir);\n hasDirectives = true;\n }\n else if (doc.directives.docStart)\n hasDirectives = true;\n }\n if (hasDirectives)\n lines.push('---');\n const ctx = stringify.createStringifyContext(doc, options);\n const { commentString } = ctx.options;\n if (doc.commentBefore) {\n if (lines.length !== 1)\n lines.unshift('');\n const cs = commentString(doc.commentBefore);\n lines.unshift(stringifyComment.indentComment(cs, ''));\n }\n let chompKeep = false;\n let contentComment = null;\n if (doc.contents) {\n if (Node.isNode(doc.contents)) {\n if (doc.contents.spaceBefore && hasDirectives)\n lines.push('');\n if (doc.contents.commentBefore) {\n const cs = commentString(doc.contents.commentBefore);\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n // top-level block scalars need to be indented if followed by a comment\n ctx.forceBlockIndent = !!doc.comment;\n contentComment = doc.contents.comment;\n }\n const onChompKeep = contentComment ? undefined : () => (chompKeep = true);\n let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);\n if (contentComment)\n body += stringifyComment.lineComment(body, '', commentString(contentComment));\n if ((body[0] === '|' || body[0] === '>') &&\n lines[lines.length - 1] === '---') {\n // Top-level block scalars with a preceding doc marker ought to use the\n // same line for their header.\n lines[lines.length - 1] = `--- ${body}`;\n }\n else\n lines.push(body);\n }\n else {\n lines.push(stringify.stringify(doc.contents, ctx));\n }\n if (doc.directives?.docEnd) {\n if (doc.comment) {\n const cs = commentString(doc.comment);\n if (cs.includes('\\n')) {\n lines.push('...');\n lines.push(stringifyComment.indentComment(cs, ''));\n }\n else {\n lines.push(`... ${cs}`);\n }\n }\n else {\n lines.push('...');\n }\n }\n else {\n let dc = doc.comment;\n if (dc && chompKeep)\n dc = dc.replace(/^\\n+/, '');\n if (dc) {\n if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')\n lines.push('');\n lines.push(stringifyComment.indentComment(commentString(dc), ''));\n }\n }\n return lines.join('\\n') + '\\n';\n}\n\nexports.stringifyDocument = stringifyDocument;\n","'use strict';\n\nfunction stringifyNumber({ format, minFractionDigits, tag, value }) {\n if (typeof value === 'bigint')\n return String(value);\n const num = typeof value === 'number' ? value : Number(value);\n if (!isFinite(num))\n return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf';\n let n = JSON.stringify(value);\n if (!format &&\n minFractionDigits &&\n (!tag || tag === 'tag:yaml.org,2002:float') &&\n /^\\d/.test(n)) {\n let i = n.indexOf('.');\n if (i < 0) {\n i = n.length;\n n += '.';\n }\n let d = minFractionDigits - (n.length - i - 1);\n while (d-- > 0)\n n += '0';\n }\n return n;\n}\n\nexports.stringifyNumber = stringifyNumber;\n","'use strict';\n\nvar Node = require('../nodes/Node.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyPair({ key, value }, ctx, onComment, onChompKeep) {\n const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;\n let keyComment = (Node.isNode(key) && key.comment) || null;\n if (simpleKeys) {\n if (keyComment) {\n throw new Error('With simple keys, key nodes cannot have comments');\n }\n if (Node.isCollection(key)) {\n const msg = 'With simple keys, collection cannot be used as a key value';\n throw new Error(msg);\n }\n }\n let explicitKey = !simpleKeys &&\n (!key ||\n (keyComment && value == null && !ctx.inFlow) ||\n Node.isCollection(key) ||\n (Node.isScalar(key)\n ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL\n : typeof key === 'object'));\n ctx = Object.assign({}, ctx, {\n allNullValues: false,\n implicitKey: !explicitKey && (simpleKeys || !allNullValues),\n indent: indent + indentStep\n });\n let keyCommentDone = false;\n let chompKeep = false;\n let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));\n if (!explicitKey && !ctx.inFlow && str.length > 1024) {\n if (simpleKeys)\n throw new Error('With simple keys, single line scalar must not span more than 1024 characters');\n explicitKey = true;\n }\n if (ctx.inFlow) {\n if (allNullValues || value == null) {\n if (keyCommentDone && onComment)\n onComment();\n return str === '' ? '?' : explicitKey ? `? ${str}` : str;\n }\n }\n else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {\n str = `? ${str}`;\n if (keyComment && !keyCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n else if (chompKeep && onChompKeep)\n onChompKeep();\n return str;\n }\n if (keyCommentDone)\n keyComment = null;\n if (explicitKey) {\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n str = `? 
${str}\\n${indent}:`;\n }\n else {\n str = `${str}:`;\n if (keyComment)\n str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n }\n let vcb = '';\n let valueComment = null;\n if (Node.isNode(value)) {\n if (value.spaceBefore)\n vcb = '\\n';\n if (value.commentBefore) {\n const cs = commentString(value.commentBefore);\n vcb += `\\n${stringifyComment.indentComment(cs, ctx.indent)}`;\n }\n valueComment = value.comment;\n }\n else if (value && typeof value === 'object') {\n value = doc.createNode(value);\n }\n ctx.implicitKey = false;\n if (!explicitKey && !keyComment && Node.isScalar(value))\n ctx.indentAtStart = str.length + 1;\n chompKeep = false;\n if (!indentSeq &&\n indentStep.length >= 2 &&\n !ctx.inFlow &&\n !explicitKey &&\n Node.isSeq(value) &&\n !value.flow &&\n !value.tag &&\n !value.anchor) {\n // If indentSeq === false, consider '- ' as part of indentation where possible\n ctx.indent = ctx.indent.substr(2);\n }\n let valueCommentDone = false;\n const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));\n let ws = ' ';\n if (vcb || keyComment) {\n if (valueStr === '' && !ctx.inFlow)\n ws = vcb === '\\n' ? '\\n\\n' : vcb;\n else\n ws = `${vcb}\\n${ctx.indent}`;\n }\n else if (!explicitKey && Node.isCollection(value)) {\n const flow = valueStr[0] === '[' || valueStr[0] === '{';\n if (!flow || valueStr.includes('\\n'))\n ws = `\\n${ctx.indent}`;\n }\n else if (valueStr === '' || valueStr[0] === '\\n')\n ws = '';\n str += ws + valueStr;\n if (ctx.inFlow) {\n if (valueCommentDone && onComment)\n onComment();\n }\n else if (valueComment && !valueCommentDone) {\n str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));\n }\n else if (chompKeep && onChompKeep) {\n onChompKeep();\n }\n return str;\n}\n\nexports.stringifyPair = stringifyPair;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar foldFlowLines = require('./foldFlowLines.js');\n\nconst getFoldOptions = (ctx) => ({\n indentAtStart: ctx.indentAtStart,\n lineWidth: ctx.options.lineWidth,\n minContentWidth: ctx.options.minContentWidth\n});\n// Also checks for lines starting with %, as parsing the output as YAML 1.1 will\n// presume that's starting a new document.\nconst containsDocumentMarker = (str) => /^(%|---|\\.\\.\\.)/m.test(str);\nfunction lineLengthOverLimit(str, lineWidth, indentLength) {\n if (!lineWidth || lineWidth < 0)\n return false;\n const limit = lineWidth - indentLength;\n const strLen = str.length;\n if (strLen <= limit)\n return false;\n for (let i = 0, start = 0; i < strLen; ++i) {\n if (str[i] === '\\n') {\n if (i - start > limit)\n return true;\n start = i + 1;\n if (strLen - start <= limit)\n return false;\n }\n }\n return true;\n}\nfunction doubleQuotedString(value, ctx) {\n const json = JSON.stringify(value);\n if (ctx.options.doubleQuotedAsJSON)\n return json;\n const { implicitKey } = ctx;\n const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;\n const indent = ctx.indent || (containsDocumentMarker(value) ? 
' ' : '');\n let str = '';\n let start = 0;\n for (let i = 0, ch = json[i]; ch; ch = json[++i]) {\n if (ch === ' ' && json[i + 1] === '\\\\' && json[i + 2] === 'n') {\n // space before newline needs to be escaped to not be folded\n str += json.slice(start, i) + '\\\\ ';\n i += 1;\n start = i;\n ch = '\\\\';\n }\n if (ch === '\\\\')\n switch (json[i + 1]) {\n case 'u':\n {\n str += json.slice(start, i);\n const code = json.substr(i + 2, 4);\n switch (code) {\n case '0000':\n str += '\\\\0';\n break;\n case '0007':\n str += '\\\\a';\n break;\n case '000b':\n str += '\\\\v';\n break;\n case '001b':\n str += '\\\\e';\n break;\n case '0085':\n str += '\\\\N';\n break;\n case '00a0':\n str += '\\\\_';\n break;\n case '2028':\n str += '\\\\L';\n break;\n case '2029':\n str += '\\\\P';\n break;\n default:\n if (code.substr(0, 2) === '00')\n str += '\\\\x' + code.substr(2);\n else\n str += json.substr(i, 6);\n }\n i += 5;\n start = i + 1;\n }\n break;\n case 'n':\n if (implicitKey ||\n json[i + 2] === '\"' ||\n json.length < minMultiLineLength) {\n i += 1;\n }\n else {\n // folding will eat first newline\n str += json.slice(start, i) + '\\n\\n';\n while (json[i + 2] === '\\\\' &&\n json[i + 3] === 'n' &&\n json[i + 4] !== '\"') {\n str += '\\n';\n i += 2;\n }\n str += indent;\n // space after newline needs to be escaped to not be folded\n if (json[i + 2] === ' ')\n str += '\\\\';\n i += 1;\n start = i + 1;\n }\n break;\n default:\n i += 1;\n }\n }\n str = start ? str + json.slice(start) : json;\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx));\n}\nfunction singleQuotedString(value, ctx) {\n if (ctx.options.singleQuote === false ||\n (ctx.implicitKey && value.includes('\\n')) ||\n /[ \\t]\\n|\\n[ \\t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline\n )\n return doubleQuotedString(value, ctx);\n const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');\n const res = \"'\" + value.replace(/'/g, \"''\").replace(/\\n+/g, `$&\\n${indent}`) + \"'\";\n return ctx.implicitKey\n ? res\n : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction quotedString(value, ctx) {\n const { singleQuote } = ctx.options;\n let qs;\n if (singleQuote === false)\n qs = doubleQuotedString;\n else {\n const hasDouble = value.includes('\"');\n const hasSingle = value.includes(\"'\");\n if (hasDouble && !hasSingle)\n qs = singleQuotedString;\n else if (hasSingle && !hasDouble)\n qs = doubleQuotedString;\n else\n qs = singleQuote ? singleQuotedString : doubleQuotedString;\n }\n return qs(value, ctx);\n}\nfunction blockString({ comment, type, value }, ctx, onComment, onChompKeep) {\n const { blockQuote, commentString, lineWidth } = ctx.options;\n // 1. Block can't end in whitespace unless the last line is non-empty.\n // 2. Strings consisting of only whitespace are best rendered explicitly.\n if (!blockQuote || /\\n[\\t ]+$/.test(value) || /^\\s*$/.test(value)) {\n return quotedString(value, ctx);\n }\n const indent = ctx.indent ||\n (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');\n const literal = blockQuote === 'literal'\n ? true\n : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED\n ? false\n : type === Scalar.Scalar.BLOCK_LITERAL\n ? true\n : !lineLengthOverLimit(value, lineWidth, indent.length);\n if (!value)\n return literal ? 
'|\\n' : '>\\n';\n // determine chomping from whitespace at value end\n let chomp;\n let endStart;\n for (endStart = value.length; endStart > 0; --endStart) {\n const ch = value[endStart - 1];\n if (ch !== '\\n' && ch !== '\\t' && ch !== ' ')\n break;\n }\n let end = value.substring(endStart);\n const endNlPos = end.indexOf('\\n');\n if (endNlPos === -1) {\n chomp = '-'; // strip\n }\n else if (value === end || endNlPos !== end.length - 1) {\n chomp = '+'; // keep\n if (onChompKeep)\n onChompKeep();\n }\n else {\n chomp = ''; // clip\n }\n if (end) {\n value = value.slice(0, -end.length);\n if (end[end.length - 1] === '\\n')\n end = end.slice(0, -1);\n end = end.replace(/\\n+(?!\\n|$)/g, `$&${indent}`);\n }\n // determine indent indicator from whitespace at value start\n let startWithSpace = false;\n let startEnd;\n let startNlPos = -1;\n for (startEnd = 0; startEnd < value.length; ++startEnd) {\n const ch = value[startEnd];\n if (ch === ' ')\n startWithSpace = true;\n else if (ch === '\\n')\n startNlPos = startEnd;\n else\n break;\n }\n let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);\n if (start) {\n value = value.substring(start.length);\n start = start.replace(/\\n+/g, `$&${indent}`);\n }\n const indentSize = indent ? '2' : '1'; // root is at -1\n let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;\n if (comment) {\n header += ' ' + commentString(comment.replace(/ ?[\\r\\n]+/g, ' '));\n if (onComment)\n onComment();\n }\n if (literal) {\n value = value.replace(/\\n+/g, `$&${indent}`);\n return `${header}\\n${indent}${start}${value}${end}`;\n }\n value = value\n .replace(/\\n+/g, '\\n$&')\n .replace(/(?:^|\\n)([\\t ].*)(?:([\\n\\t ]*)\\n(?![\\n\\t ]))?/g, '$1$2') // more-indented lines aren't folded\n // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent\n .replace(/\\n+/g, `$&${indent}`);\n const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx));\n return `${header}\\n${indent}${body}`;\n}\nfunction plainString(item, ctx, onComment, onChompKeep) {\n const { type, value } = item;\n const { actualString, implicitKey, indent, inFlow } = ctx;\n if ((implicitKey && /[\\n[\\]{},]/.test(value)) ||\n (inFlow && /[[\\]{},]/.test(value))) {\n return quotedString(value, ctx);\n }\n if (!value ||\n /^[\\n\\t ,[\\]{}#&*!|>'\"%@`]|^[?-]$|^[?-][ \\t]|[\\n:][ \\t]|[ \\t]\\n|[\\n\\t ]#|[\\n\\t :]$/.test(value)) {\n // not allowed:\n // - empty string, '-' or '?'\n // - start with an indicator character (except [?:-]) or /[?-] /\n // - '\\n ', ': ' or ' \\n' anywhere\n // - '#' not preceded by a non-space char\n // - end with ' ' or ':'\n return implicitKey || inFlow || !value.includes('\\n')\n ? quotedString(value, ctx)\n : blockString(item, ctx, onComment, onChompKeep);\n }\n if (!implicitKey &&\n !inFlow &&\n type !== Scalar.Scalar.PLAIN &&\n value.includes('\\n')) {\n // Where allowed & type not set explicitly, prefer block style for multiline strings\n return blockString(item, ctx, onComment, onChompKeep);\n }\n if (indent === '' && containsDocumentMarker(value)) {\n ctx.forceBlockIndent = true;\n return blockString(item, ctx, onComment, onChompKeep);\n }\n const str = value.replace(/\\n+/g, `$&\\n${indent}`);\n // Verify that output will be parsed as a string, as e.g. plain numbers and\n // booleans get parsed with those types in v1.2 (e.g. 
'42', 'true' & '0.9e-3'),\n // and others in v1.1.\n if (actualString) {\n const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);\n const { compat, tags } = ctx.doc.schema;\n if (tags.some(test) || compat?.some(test))\n return quotedString(value, ctx);\n }\n return implicitKey\n ? str\n : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));\n}\nfunction stringifyString(item, ctx, onComment, onChompKeep) {\n const { implicitKey, inFlow } = ctx;\n const ss = typeof item.value === 'string'\n ? item\n : Object.assign({}, item, { value: String(item.value) });\n let { type } = item;\n if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n // force double quotes on control characters & unpaired surrogates\n if (/[\\x00-\\x08\\x0b-\\x1f\\x7f-\\x9f\\u{D800}-\\u{DFFF}]/u.test(ss.value))\n type = Scalar.Scalar.QUOTE_DOUBLE;\n }\n const _stringify = (_type) => {\n switch (_type) {\n case Scalar.Scalar.BLOCK_FOLDED:\n case Scalar.Scalar.BLOCK_LITERAL:\n return implicitKey || inFlow\n ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers\n : blockString(ss, ctx, onComment, onChompKeep);\n case Scalar.Scalar.QUOTE_DOUBLE:\n return doubleQuotedString(ss.value, ctx);\n case Scalar.Scalar.QUOTE_SINGLE:\n return singleQuotedString(ss.value, ctx);\n case Scalar.Scalar.PLAIN:\n return plainString(ss, ctx, onComment, onChompKeep);\n default:\n return null;\n }\n };\n let res = _stringify(type);\n if (res === null) {\n const { defaultKeyType, defaultStringType } = ctx.options;\n const t = (implicitKey && defaultKeyType) || defaultStringType;\n res = _stringify(t);\n if (res === null)\n throw new Error(`Unsupported default string type ${t}`);\n }\n return res;\n}\n\nexports.stringifyString = stringifyString;\n","'use strict';\n\nvar Node = require('./nodes/Node.js');\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove node');\n/**\n * Apply a visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nfunction visit(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n visit_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisit.SKIP = SKIP;\n/** Remove the current node */\nvisit.REMOVE = REMOVE;\nfunction visit_(key, node, visitor, path) {\n const ctrl = callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visit_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = visit_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = visit_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = visit_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\n/**\n * Apply an async visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n * - `key`: For sequence values and map `Pair`, the node's index in the\n * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n * `null` for the root node.\n * - `node`: The current node.\n * - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n * - `Promise`: Must resolve to one of the following values\n * - `undefined` (default): Do nothing and continue\n * - `visit.SKIP`: Do not visit the children of this node, continue with next\n * sibling\n * - `visit.BREAK`: Terminate traversal completely\n * - `visit.REMOVE`: Remove the current node, then continue with the next one\n * - `Node`: Replace the current node, then continue by visiting it\n * - `number`: While iterating the items of a sequence or map, set the index\n * of the next step. This is useful especially if the index of the current\n * node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nasync function visitAsync(node, visitor) {\n const visitor_ = initVisitor(visitor);\n if (Node.isDocument(node)) {\n const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));\n if (cd === REMOVE)\n node.contents = null;\n }\n else\n await visitAsync_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisitAsync.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisitAsync.SKIP = SKIP;\n/** Remove the current node */\nvisitAsync.REMOVE = REMOVE;\nasync function visitAsync_(key, node, visitor, path) {\n const ctrl = await callVisitor(key, node, visitor, path);\n if (Node.isNode(ctrl) || Node.isPair(ctrl)) {\n replaceNode(key, path, ctrl);\n return visitAsync_(key, ctrl, visitor, path);\n }\n if (typeof ctrl !== 'symbol') {\n if (Node.isCollection(node)) {\n path = Object.freeze(path.concat(node));\n for (let i = 0; i < node.items.length; ++i) {\n const ci = await visitAsync_(i, node.items[i], visitor, path);\n if (typeof ci === 'number')\n i = ci - 1;\n else if (ci === BREAK)\n return BREAK;\n else if (ci === REMOVE) {\n node.items.splice(i, 1);\n i -= 1;\n }\n }\n }\n else if (Node.isPair(node)) {\n path = Object.freeze(path.concat(node));\n const ck = await visitAsync_('key', node.key, visitor, path);\n if (ck === BREAK)\n return BREAK;\n else if (ck === REMOVE)\n node.key = null;\n const cv = await visitAsync_('value', node.value, visitor, path);\n if (cv === BREAK)\n return BREAK;\n else if (cv === REMOVE)\n node.value = null;\n }\n }\n return ctrl;\n}\nfunction initVisitor(visitor) {\n if (typeof visitor === 'object' &&\n (visitor.Collection || visitor.Node || visitor.Value)) {\n return Object.assign({\n Alias: visitor.Node,\n Map: visitor.Node,\n Scalar: visitor.Node,\n Seq: visitor.Node\n }, visitor.Value && {\n Map: visitor.Value,\n Scalar: visitor.Value,\n Seq: visitor.Value\n }, visitor.Collection && {\n Map: visitor.Collection,\n Seq: visitor.Collection\n }, visitor);\n }\n return visitor;\n}\nfunction callVisitor(key, node, visitor, path) {\n if (typeof visitor === 'function')\n return visitor(key, node, path);\n if (Node.isMap(node))\n return visitor.Map?.(key, node, path);\n if (Node.isSeq(node))\n return visitor.Seq?.(key, node, path);\n if (Node.isPair(node))\n return visitor.Pair?.(key, node, path);\n if (Node.isScalar(node))\n return visitor.Scalar?.(key, node, path);\n if (Node.isAlias(node))\n return visitor.Alias?.(key, node, path);\n return undefined;\n}\nfunction replaceNode(key, path, node) {\n const parent = path[path.length - 1];\n if (Node.isCollection(parent)) {\n parent.items[key] = node;\n }\n else if (Node.isPair(parent)) {\n if (key === 'key')\n parent.key = node;\n else\n parent.value = node;\n }\n else if (Node.isDocument(parent)) {\n parent.contents = node;\n }\n else {\n const pt = Node.isAlias(parent) ? 
'alias' : 'scalar';\n throw new Error(`Cannot replace node with ${pt} parent`);\n }\n}\n\nexports.visit = visit;\nexports.visitAsync = visitAsync;\n","const ANSI_BACKGROUND_OFFSET = 10;\n\nconst wrapAnsi16 = (offset = 0) => code => `\\u001B[${code + offset}m`;\n\nconst wrapAnsi256 = (offset = 0) => code => `\\u001B[${38 + offset};5;${code}m`;\n\nconst wrapAnsi16m = (offset = 0) => (red, green, blue) => `\\u001B[${38 + offset};2;${red};${green};${blue}m`;\n\nfunction assembleStyles() {\n\tconst codes = new Map();\n\tconst styles = {\n\t\tmodifier: {\n\t\t\treset: [0, 0],\n\t\t\t// 21 isn't widely supported and 22 does the same thing\n\t\t\tbold: [1, 22],\n\t\t\tdim: [2, 22],\n\t\t\titalic: [3, 23],\n\t\t\tunderline: [4, 24],\n\t\t\toverline: [53, 55],\n\t\t\tinverse: [7, 27],\n\t\t\thidden: [8, 28],\n\t\t\tstrikethrough: [9, 29]\n\t\t},\n\t\tcolor: {\n\t\t\tblack: [30, 39],\n\t\t\tred: [31, 39],\n\t\t\tgreen: [32, 39],\n\t\t\tyellow: [33, 39],\n\t\t\tblue: [34, 39],\n\t\t\tmagenta: [35, 39],\n\t\t\tcyan: [36, 39],\n\t\t\twhite: [37, 39],\n\n\t\t\t// Bright color\n\t\t\tblackBright: [90, 39],\n\t\t\tredBright: [91, 39],\n\t\t\tgreenBright: [92, 39],\n\t\t\tyellowBright: [93, 39],\n\t\t\tblueBright: [94, 39],\n\t\t\tmagentaBright: [95, 39],\n\t\t\tcyanBright: [96, 39],\n\t\t\twhiteBright: [97, 39]\n\t\t},\n\t\tbgColor: {\n\t\t\tbgBlack: [40, 49],\n\t\t\tbgRed: [41, 49],\n\t\t\tbgGreen: [42, 49],\n\t\t\tbgYellow: [43, 49],\n\t\t\tbgBlue: [44, 49],\n\t\t\tbgMagenta: [45, 49],\n\t\t\tbgCyan: [46, 49],\n\t\t\tbgWhite: [47, 49],\n\n\t\t\t// Bright color\n\t\t\tbgBlackBright: [100, 49],\n\t\t\tbgRedBright: [101, 49],\n\t\t\tbgGreenBright: [102, 49],\n\t\t\tbgYellowBright: [103, 49],\n\t\t\tbgBlueBright: [104, 49],\n\t\t\tbgMagentaBright: [105, 49],\n\t\t\tbgCyanBright: [106, 49],\n\t\t\tbgWhiteBright: [107, 49]\n\t\t}\n\t};\n\n\t// Alias bright black as gray (and grey)\n\tstyles.color.gray = styles.color.blackBright;\n\tstyles.bgColor.bgGray = styles.bgColor.bgBlackBright;\n\tstyles.color.grey = styles.color.blackBright;\n\tstyles.bgColor.bgGrey = styles.bgColor.bgBlackBright;\n\n\tfor (const [groupName, group] of Object.entries(styles)) {\n\t\tfor (const [styleName, style] of Object.entries(group)) {\n\t\t\tstyles[styleName] = {\n\t\t\t\topen: `\\u001B[${style[0]}m`,\n\t\t\t\tclose: `\\u001B[${style[1]}m`\n\t\t\t};\n\n\t\t\tgroup[styleName] = styles[styleName];\n\n\t\t\tcodes.set(style[0], style[1]);\n\t\t}\n\n\t\tObject.defineProperty(styles, groupName, {\n\t\t\tvalue: group,\n\t\t\tenumerable: false\n\t\t});\n\t}\n\n\tObject.defineProperty(styles, 'codes', {\n\t\tvalue: codes,\n\t\tenumerable: false\n\t});\n\n\tstyles.color.close = '\\u001B[39m';\n\tstyles.bgColor.close = '\\u001B[49m';\n\n\tstyles.color.ansi = wrapAnsi16();\n\tstyles.color.ansi256 = wrapAnsi256();\n\tstyles.color.ansi16m = wrapAnsi16m();\n\tstyles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET);\n\tstyles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET);\n\tstyles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET);\n\n\t// From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js\n\tObject.defineProperties(styles, {\n\t\trgbToAnsi256: {\n\t\t\tvalue: (red, green, blue) => {\n\t\t\t\t// We use the extended greyscale palette here, with the exception of\n\t\t\t\t// black and white. 
normal palette only has 4 greyscale shades.\n\t\t\t\tif (red === green && green === blue) {\n\t\t\t\t\tif (red < 8) {\n\t\t\t\t\t\treturn 16;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (red > 248) {\n\t\t\t\t\t\treturn 231;\n\t\t\t\t\t}\n\n\t\t\t\t\treturn Math.round(((red - 8) / 247) * 24) + 232;\n\t\t\t\t}\n\n\t\t\t\treturn 16 +\n\t\t\t\t\t(36 * Math.round(red / 255 * 5)) +\n\t\t\t\t\t(6 * Math.round(green / 255 * 5)) +\n\t\t\t\t\tMath.round(blue / 255 * 5);\n\t\t\t},\n\t\t\tenumerable: false\n\t\t},\n\t\thexToRgb: {\n\t\t\tvalue: hex => {\n\t\t\t\tconst matches = /(?<colorString>[a-f\\d]{6}|[a-f\\d]{3})/i.exec(hex.toString(16));\n\t\t\t\tif (!matches) {\n\t\t\t\t\treturn [0, 0, 0];\n\t\t\t\t}\n\n\t\t\t\tlet {colorString} = matches.groups;\n\n\t\t\t\tif (colorString.length === 3) {\n\t\t\t\t\tcolorString = colorString.split('').map(character => character + character).join('');\n\t\t\t\t}\n\n\t\t\t\tconst integer = Number.parseInt(colorString, 16);\n\n\t\t\t\treturn [\n\t\t\t\t\t(integer >> 16) & 0xFF,\n\t\t\t\t\t(integer >> 8) & 0xFF,\n\t\t\t\t\tinteger & 0xFF\n\t\t\t\t];\n\t\t\t},\n\t\t\tenumerable: false\n\t\t},\n\t\thexToAnsi256: {\n\t\t\tvalue: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)),\n\t\t\tenumerable: false\n\t\t},\n\t\tansi256ToAnsi: {\n\t\t\tvalue: code => {\n\t\t\t\tif (code < 8) {\n\t\t\t\t\treturn 30 + code;\n\t\t\t\t}\n\n\t\t\t\tif (code < 16) {\n\t\t\t\t\treturn 90 + (code - 8);\n\t\t\t\t}\n\n\t\t\t\tlet red;\n\t\t\t\tlet green;\n\t\t\t\tlet blue;\n\n\t\t\t\tif (code >= 232) {\n\t\t\t\t\tred = (((code - 232) * 10) + 8) / 255;\n\t\t\t\t\tgreen = red;\n\t\t\t\t\tblue = red;\n\t\t\t\t} else {\n\t\t\t\t\tcode -= 16;\n\n\t\t\t\t\tconst remainder = code % 36;\n\n\t\t\t\t\tred = Math.floor(code / 36) / 5;\n\t\t\t\t\tgreen = Math.floor(remainder / 6) / 5;\n\t\t\t\t\tblue = (remainder % 6) / 5;\n\t\t\t\t}\n\n\t\t\t\tconst value = Math.max(red, green, blue) * 2;\n\n\t\t\t\tif (value === 0) {\n\t\t\t\t\treturn 30;\n\t\t\t\t}\n\n\t\t\t\tlet result = 30 + ((Math.round(blue) << 2) | (Math.round(green) << 1) | Math.round(red));\n\n\t\t\t\tif (value === 2) {\n\t\t\t\t\tresult += 60;\n\t\t\t\t}\n\n\t\t\t\treturn result;\n\t\t\t},\n\t\t\tenumerable: false\n\t\t},\n\t\trgbToAnsi: {\n\t\t\tvalue: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)),\n\t\t\tenumerable: false\n\t\t},\n\t\thexToAnsi: {\n\t\t\tvalue: hex => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)),\n\t\t\tenumerable: false\n\t\t}\n\t});\n\n\treturn styles;\n}\n\nconst ansiStyles = assembleStyles();\n\nexport default ansiStyles;\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, 
key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(3109);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file +{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/KA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChgCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACn/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChqDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7GA;AACA;;;;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC16EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;ACvNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;ACPA;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;ACNA;AACA;;;;AEDA;AACA;AACA;AACA","sources":["../webpack://dependency-review-action/./lib/dependency-graph.js","../webpack://dependency-review-action/./lib/licenses.js","../webpack://dependency-review-action/./lib/main.js","../webpack://dependency-review-action/./lib/schemas.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/command.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/core.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/file-command.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/summary.js","../webpack://dependency-review-action/./node_modules/@actions/core/lib/utils.js","../webpack://dependency-review-action/./node_modules/@actions/github/lib/context.js","../webpack://dependency-review-action/./node_modules/@actions/github/lib/github.js","../webpack://dependency-review-action/./node_modules/@actions/github/lib/internal/utils.js","../webpack://dependency-review-action/./node_modules/@actions/github/lib/utils.js","../webpack://dependency-review-action/
./node_modules/@actions/http-client/lib/auth.js","../webpack://dependency-review-action/./node_modules/@actions/http-client/lib/index.js","../webpack://dependency-review-action/./node_modules/@actions/http-client/lib/proxy.js","../webpack://dependency-review-action/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/core/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/plugin-retry/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://dependency-review-action/./node_modules/@octokit/request/dist-node/index.js","../webpack://dependency-review-action/./node_modules/before-after-hook/index.js","../webpack://dependency-review-action/./node_modules/before-after-hook/lib/add.js","../webpack://dependency-review-action/./node_modules/before-after-hook/lib/register.js","../webpack://dependency-review-action/./node_modules/before-after-hook/lib/remove.js","../webpack://dependency-review-action/./node_modules/bottleneck/light.js","../webpack://dependency-review-action/./node_modules/deprecation/dist-node/index.js","../webpack://dependency-review-action/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://dependency-review-action/./node_modules/node-fetch/lib/index.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/tr46/index.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/webidl-conversions/lib/index.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/URL-impl.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/URL.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/public-api.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/url-state-machine.js","../webpack://dependency-review-action/./node_modules/node-fetch/node_modules/whatwg-url/lib/utils.js","../webpack://dependency-review-action/./node_modules/once/once.js","../webpack://dependency-review-action/./node_modules/tunnel/index.js","../webpack://dependency-review-action/./node_modules/tunnel/lib/tunnel.js","../webpack://dependency-review-action/./node_modules/universal-user-agent/dist-node/index.js","../webpack://dependency-review-action/./node_modules/wrappy/wrappy.js","../webpack://dependency-review-action/./node_modules/zod/lib/ZodError.js","../webpack://dependency-review-action/./node_modules/zod/lib/external.js","../webpack://dependency-review-action/./node_modules/zod/lib/helpers/errorUtil.js","../webpack://dependency-review-action/./node_modules/zod/lib/helpers/parseUtil.js","../webpack://dependency-review-action/./node_modules/zod/lib/helpers/typeAliases.js","../webpack://dependency-review-action/./node_modules/zod/lib/helpers/util.js","../webpack://dependency-review-action/./node_modules/zod/lib/index.js","../webpack://dependency-review-action/./node_modules/zod/lib/types.js","../webpack://dependency-review-action/.
/src/config.ts","../webpack://dependency-review-action/./src/filter.ts","../webpack://dependency-review-action/./src/schemas.ts","../webpack://dependency-review-action/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://dependency-review-action/external node-commonjs \"assert\"","../webpack://dependency-review-action/external node-commonjs \"events\"","../webpack://dependency-review-action/external node-commonjs \"fs\"","../webpack://dependency-review-action/external node-commonjs \"http\"","../webpack://dependency-review-action/external node-commonjs \"https\"","../webpack://dependency-review-action/external node-commonjs \"net\"","../webpack://dependency-review-action/external node-commonjs \"os\"","../webpack://dependency-review-action/external node-commonjs \"path\"","../webpack://dependency-review-action/external node-commonjs \"punycode\"","../webpack://dependency-review-action/external node-commonjs \"stream\"","../webpack://dependency-review-action/external node-commonjs \"tls\"","../webpack://dependency-review-action/external node-commonjs \"url\"","../webpack://dependency-review-action/external node-commonjs \"util\"","../webpack://dependency-review-action/external node-commonjs \"zlib\"","../webpack://dependency-review-action/./node_modules/ansi-styles/index.js","../webpack://dependency-review-action/webpack/bootstrap","../webpack://dependency-review-action/webpack/runtime/define property getters","../webpack://dependency-review-action/webpack/runtime/hasOwnProperty shorthand","../webpack://dependency-review-action/webpack/runtime/make namespace object","../webpack://dependency-review-action/webpack/runtime/compat","../webpack://dependency-review-action/webpack/before-startup","../webpack://dependency-review-action/webpack/startup","../webpack://dependency-review-action/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.compare = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst githubUtils = __importStar(require(\"@actions/github/lib/utils\"));\nconst retry = __importStar(require(\"@octokit/plugin-retry\"));\nconst schemas_1 = require(\"./schemas\");\nconst retryingOctokit = githubUtils.GitHub.plugin(retry.retry);\nconst octo = new retryingOctokit(githubUtils.getOctokitOptions(core.getInput('repo-token', { required: true })));\nfunction compare({ owner, repo, baseRef, headRef }) {\n return __awaiter(this, void 0, void 0, function* () {\n const changes = yield octo.paginate('GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}', {\n owner,\n repo,\n basehead: `${baseRef}...${headRef}`\n });\n return schemas_1.ChangesSchema.parse(changes);\n });\n}\nexports.compare = compare;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getDeniedLicenseChanges = void 0;\n/**\n * Loops through a list of changes, filtering and returning the\n * ones that don't conform to the licenses allow/deny lists.\n *\n * Keep in mind that we don't let users specify both an allow and a deny\n * list in their config files, so this code works under the assumption that\n * one of the two list parameters will be empty. If both lists are provided,\n * we will ignore the deny list.\n * @param {Change[]} changes The list of changes to filter.\n * @param { { allow?: string[], deny?: string[]}} licenses An object with `allow`/`deny` keys, each containing a list of licenses.\n * @returns {[Array, Array 0) {\n printChangeVulnerabilities(change);\n failed = true;\n }\n }\n let [licenseErrors, unknownLicenses] = (0, licenses_1.getDeniedLicenseChanges)(changes, licenses);\n if (licenseErrors.length > 0) {\n printLicensesError(licenseErrors, licenses);\n printNullLicenses(unknownLicenses);\n core.setFailed('Dependency review detected incompatible licenses.');\n }\n if (failed) {\n core.setFailed('Dependency review detected vulnerable packages.');\n }\n else {\n core.info(`Dependency review did not detect any vulnerable packages with severity level \"${minSeverity}\" or higher.`);\n }\n }\n catch (error) {\n if (error instanceof request_error_1.RequestError && error.status === 404) {\n core.setFailed(`Dependency review could not obtain dependency data for the specified owner, repository, or revision range.`);\n }\n else if (error instanceof request_error_1.RequestError && error.status === 403) {\n core.setFailed(`Dependency review is not supported on this repository. 
Please ensure that Dependency graph is enabled, see https://github.com/${github.context.repo.owner}/${github.context.repo.repo}/settings/security_analysis`);\n }\n else {\n if (error instanceof Error) {\n core.setFailed(error.message);\n }\n else {\n core.setFailed('Unexpected fatal error');\n }\n }\n }\n });\n}\nfunction printChangeVulnerabilities(change) {\n for (const vuln of change.vulnerabilities) {\n core.info(`${ansi_styles_1.default.bold.open}${change.manifest} » ${change.name}@${change.version}${ansi_styles_1.default.bold.close} – ${vuln.advisory_summary} ${renderSeverity(vuln.severity)}`);\n core.info(` ↪ ${vuln.advisory_url}`);\n }\n}\nfunction renderSeverity(severity) {\n const color = {\n critical: 'red',\n high: 'red',\n moderate: 'yellow',\n low: 'grey'\n }[severity];\n return `${ansi_styles_1.default.color[color].open}(${severity} severity)${ansi_styles_1.default.color[color].close}`;\n}\nfunction printLicensesError(changes, licenses) {\n if (changes.length == 0) {\n return;\n }\n let { allow = [], deny = [] } = licenses;\n core.info('\\nThe following dependencies have incompatible licenses:\\n');\n for (const change of changes) {\n core.info(`${ansi_styles_1.default.bold.open}${change.manifest} » ${change.name}@${change.version}${ansi_styles_1.default.bold.close} – License: ${ansi_styles_1.default.color.red.open}${change.license}${ansi_styles_1.default.color.red.close}`);\n }\n}\nfunction printNullLicenses(changes) {\n core.info('\\nWe could not detect a license for the following dependencies:\\n');\n for (const change of changes) {\n core.info(`${ansi_styles_1.default.bold.open}${change.manifest} » ${change.name}@${change.version}${ansi_styles_1.default.bold.close}`);\n }\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChangesSchema = exports.ConfigurationOptionsSchema = exports.PullRequestSchema = exports.ChangeSchema = exports.SEVERITIES = void 0;\nconst z = __importStar(require(\"zod\"));\nexports.SEVERITIES = ['critical', 'high', 'moderate', 'low'];\nexports.ChangeSchema = z.object({\n change_type: z.enum(['added', 'removed']),\n manifest: z.string(),\n ecosystem: z.string(),\n name: z.string(),\n version: z.string(),\n package_url: z.string(),\n license: z.string().nullable(),\n source_repository_url: z.string().nullable(),\n vulnerabilities: z\n .array(z.object({\n severity: z.enum(['critical', 'high', 'moderate', 'low']),\n advisory_ghsa_id: z.string(),\n advisory_summary: z.string(),\n advisory_url: z.string()\n }))\n .optional()\n .default([])\n});\nexports.PullRequestSchema = z.object({\n number: z.number(),\n base: z.object({ sha: z.string() }),\n head: z.object({ sha: z.string() })\n});\nexports.ConfigurationOptionsSchema = z\n .object({\n fail_on_severity: z.enum(exports.SEVERITIES).default('low'),\n allow_licenses: z.array(z.string()).default([]),\n deny_licenses: z.array(z.string()).default([])\n})\n .partial()\n .refine(obj => !(obj.allow_licenses && obj.deny_licenses), \"Your workflow file has both an allow_licenses list and deny_licenses list, but you can only set one or the other.\");\nexports.ChangesSchema = z.array(exports.ChangeSchema);\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n return inputs;\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issueCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}</${tag}>`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. 
Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 
'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? 
_a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n//# sourceMappingURL=proxy.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.6.0\";\n\nconst _excluded = [\"authStrategy\"];\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, 
options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, _excluded);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.12\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.8.0\";\n\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\\n` + data.errors.map(e => ` - ${e.message}`).join(\"\\n\");\n}\n\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\"; // Expose the errors and response data in their shorthand properties.\n\n this.errors = response.errors;\n this.data = response.data; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.GraphqlResponseError = GraphqlResponseError;\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.17.0\";\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? 
arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return _objectSpread2(_objectSpread2({}, response), {}, {\n data: []\n });\n }\n\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/hook/deliveries\", \"GET /app/installations\", \"GET /applications/grants\", \"GET /authorizations\", \"GET /enterprises/{enterprise}/actions/permissions/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\", \"GET /enterprises/{enterprise}/actions/runners\", \"GET /enterprises/{enterprise}/actions/runners/downloads\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/runners/downloads\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/credential-authorizations\", \"GET /orgs/{org}/events\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/hooks/{hook_id}/deliveries\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/migrations\", \"GET 
/orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/packages\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/secret-scanning/alerts\", \"GET /orgs/{org}/team-sync/groups\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runners/downloads\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/autolinks\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\", \"GET /repos/{owner}/{repo}/hooks\", \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET 
/repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /scim/v2/enterprises/{enterprise}/Groups\", \"GET /scim/v2/enterprises/{enterprise}/Users\", \"GET /scim/v2/organizations/{org}/Users\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/team-sync/group-mappings\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/packages\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET /users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/packages\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * 
@param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nconst Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE 
/repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunAttemptLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listJobsForWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET 
/orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n 
setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"]\n }],\n addRepoToInstallationForAuthenticatedUser: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\"POST /content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createContentAttachmentForRepo: [\"POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\"POST /app/hook/deliveries/{delivery_id}/attempts\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"]\n }],\n removeRepoFromInstallationForAuthenticatedUser: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n 
updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n disableSelectedOrganizationGithubActionsEnterprise: [\"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n getAllowedActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n getGithubActionsPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions\"],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/organizations\"],\n setAllowedActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n setGithubActionsPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions\"],\n 
setSelectedOrganizationsEnabledGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST 
/repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\"],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\"],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\"],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\"],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET 
/user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/repositories\"],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {}, {\n renamed: [\"migrations\", \"listReposForAuthenticatedUser\"]\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT 
/orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST 
/projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\"],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\"],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"],\n updateReview: [\"PUT 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n createForRelease: [\"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"]\n }],\n acceptInvitationForAuthenticatedUser: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\"GET 
/repos/{owner}/{repo}/compare/{basehead}\"],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\"],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"]\n }],\n declineInvitationForAuthenticatedUser: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\"],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"],\n downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT 
/repos/{owner}/{repo}/automated-security-fixes\"],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\"],\n generateReleaseNotes: [\"POST /repos/{owner}/{repo}/releases/generate-notes\"],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n 
getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: 
[\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n 
createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\", {}, {\n renamed: [\"users\", \"addEmailForAuthenticatedUser\"]\n }],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\", {}, {\n renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"]\n }],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\", {}, {\n renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"]\n }],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\", {}, {\n renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"]\n }],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"]\n }],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"]\n }],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT 
/user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"]\n }],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"]\n }],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\", {}, {\n renamed: [\"users\", \"listBlockedByAuthenticatedUser\"]\n }],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\", {}, {\n renamed: [\"users\", \"listEmailsForAuthenticatedUser\"]\n }],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\", {}, {\n renamed: [\"users\", \"listFollowedByAuthenticatedUser\"]\n }],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\", {}, {\n renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"]\n }],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\", {}, {\n renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"]\n }],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\", {}, {\n renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"]\n }],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\", {}, {\n renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"]\n }],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"5.13.0\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = 
requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return _objectSpread2(_objectSpread2({}, api), {}, {\n rest: api\n });\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Bottleneck = _interopDefault(require('bottleneck/light'));\n\n// @ts-ignore\nasync function errorRequest(octokit, state, error, options) {\n if (!error.request || !error.request.request) {\n // address https://github.com/octokit/plugin-retry.js/issues/8\n throw error;\n } // retry all >= 400 && not doNotRetry\n\n\n if (error.status >= 400 && !state.doNotRetry.includes(error.status)) {\n const retries = options.request.retries != null ? 
options.request.retries : state.retries;\n const retryAfter = Math.pow((options.request.retryCount || 0) + 1, 2);\n throw octokit.retry.retryRequest(error, retries, retryAfter);\n } // Maybe eventually there will be more cases here\n\n\n throw error;\n}\n\n// @ts-ignore\n\nasync function wrapRequest(state, request, options) {\n const limiter = new Bottleneck(); // @ts-ignore\n\n limiter.on(\"failed\", function (error, info) {\n const maxRetries = ~~error.request.request.retries;\n const after = ~~error.request.request.retryAfter;\n options.request.retryCount = info.retryCount + 1;\n\n if (maxRetries > info.retryCount) {\n // Returning a number instructs the limiter to retry\n // the request after that number of milliseconds have passed\n return after * state.retryAfterBaseValue;\n }\n });\n return limiter.schedule(request, options);\n}\n\nconst VERSION = \"3.0.9\";\nfunction retry(octokit, octokitOptions) {\n const state = Object.assign({\n enabled: true,\n retryAfterBaseValue: 1000,\n doNotRetry: [400, 401, 403, 404, 422],\n retries: 3\n }, octokitOptions.retry);\n\n if (state.enabled) {\n octokit.hook.error(\"request\", errorRequest.bind(null, octokit, state));\n octokit.hook.wrap(\"request\", wrapRequest.bind(null, state));\n }\n\n return {\n retry: {\n retryRequest: (error, retries, retryAfter) => {\n error.request.request = Object.assign({}, error.request.request, {\n retries: retries,\n retryAfter: retryAfter\n });\n return error;\n }\n }\n };\n}\nretry.VERSION = VERSION;\n\nexports.VERSION = VERSION;\nexports.retry = retry;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n } // redact request credentials without mutating original request options\n\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n 
this.request = requestCopy; // deprecations\n\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n\n });\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.6.3\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : \"\"}`);\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\n\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n}\n\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data; // istanbul ignore else - just in case\n\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n\n return data.message;\n } // istanbul ignore next - just in case\n\n\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? 
[state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","/**\n * This file contains the Bottleneck library (MIT), compiled to ES2017, and without Clustering support.\n * https://github.com/SGrondin/bottleneck\n */\n(function (global, factory) {\n\ttypeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :\n\ttypeof define === 'function' && define.amd ? 
define(factory) :\n\t(global.Bottleneck = factory());\n}(this, (function () { 'use strict';\n\n\tvar commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};\n\n\tfunction getCjsExportFromNamespace (n) {\n\t\treturn n && n['default'] || n;\n\t}\n\n\tvar load = function(received, defaults, onto = {}) {\n\t var k, ref, v;\n\t for (k in defaults) {\n\t v = defaults[k];\n\t onto[k] = (ref = received[k]) != null ? ref : v;\n\t }\n\t return onto;\n\t};\n\n\tvar overwrite = function(received, defaults, onto = {}) {\n\t var k, v;\n\t for (k in received) {\n\t v = received[k];\n\t if (defaults[k] !== void 0) {\n\t onto[k] = v;\n\t }\n\t }\n\t return onto;\n\t};\n\n\tvar parser = {\n\t\tload: load,\n\t\toverwrite: overwrite\n\t};\n\n\tvar DLList;\n\n\tDLList = class DLList {\n\t constructor(incr, decr) {\n\t this.incr = incr;\n\t this.decr = decr;\n\t this._first = null;\n\t this._last = null;\n\t this.length = 0;\n\t }\n\n\t push(value) {\n\t var node;\n\t this.length++;\n\t if (typeof this.incr === \"function\") {\n\t this.incr();\n\t }\n\t node = {\n\t value,\n\t prev: this._last,\n\t next: null\n\t };\n\t if (this._last != null) {\n\t this._last.next = node;\n\t this._last = node;\n\t } else {\n\t this._first = this._last = node;\n\t }\n\t return void 0;\n\t }\n\n\t shift() {\n\t var value;\n\t if (this._first == null) {\n\t return;\n\t } else {\n\t this.length--;\n\t if (typeof this.decr === \"function\") {\n\t this.decr();\n\t }\n\t }\n\t value = this._first.value;\n\t if ((this._first = this._first.next) != null) {\n\t this._first.prev = null;\n\t } else {\n\t this._last = null;\n\t }\n\t return value;\n\t }\n\n\t first() {\n\t if (this._first != null) {\n\t return this._first.value;\n\t }\n\t }\n\n\t getArray() {\n\t var node, ref, results;\n\t node = this._first;\n\t results = [];\n\t while (node != null) {\n\t results.push((ref = node, node = node.next, ref.value));\n\t }\n\t return results;\n\t }\n\n\t forEachShift(cb) {\n\t var node;\n\t node = this.shift();\n\t while (node != null) {\n\t (cb(node), node = this.shift());\n\t }\n\t return void 0;\n\t }\n\n\t debug() {\n\t var node, ref, ref1, ref2, results;\n\t node = this._first;\n\t results = [];\n\t while (node != null) {\n\t results.push((ref = node, node = node.next, {\n\t value: ref.value,\n\t prev: (ref1 = ref.prev) != null ? ref1.value : void 0,\n\t next: (ref2 = ref.next) != null ? 
ref2.value : void 0\n\t }));\n\t }\n\t return results;\n\t }\n\n\t};\n\n\tvar DLList_1 = DLList;\n\n\tvar Events;\n\n\tEvents = class Events {\n\t constructor(instance) {\n\t this.instance = instance;\n\t this._events = {};\n\t if ((this.instance.on != null) || (this.instance.once != null) || (this.instance.removeAllListeners != null)) {\n\t throw new Error(\"An Emitter already exists for this object\");\n\t }\n\t this.instance.on = (name, cb) => {\n\t return this._addListener(name, \"many\", cb);\n\t };\n\t this.instance.once = (name, cb) => {\n\t return this._addListener(name, \"once\", cb);\n\t };\n\t this.instance.removeAllListeners = (name = null) => {\n\t if (name != null) {\n\t return delete this._events[name];\n\t } else {\n\t return this._events = {};\n\t }\n\t };\n\t }\n\n\t _addListener(name, status, cb) {\n\t var base;\n\t if ((base = this._events)[name] == null) {\n\t base[name] = [];\n\t }\n\t this._events[name].push({cb, status});\n\t return this.instance;\n\t }\n\n\t listenerCount(name) {\n\t if (this._events[name] != null) {\n\t return this._events[name].length;\n\t } else {\n\t return 0;\n\t }\n\t }\n\n\t async trigger(name, ...args) {\n\t var e, promises;\n\t try {\n\t if (name !== \"debug\") {\n\t this.trigger(\"debug\", `Event triggered: ${name}`, args);\n\t }\n\t if (this._events[name] == null) {\n\t return;\n\t }\n\t this._events[name] = this._events[name].filter(function(listener) {\n\t return listener.status !== \"none\";\n\t });\n\t promises = this._events[name].map(async(listener) => {\n\t var e, returned;\n\t if (listener.status === \"none\") {\n\t return;\n\t }\n\t if (listener.status === \"once\") {\n\t listener.status = \"none\";\n\t }\n\t try {\n\t returned = typeof listener.cb === \"function\" ? listener.cb(...args) : void 0;\n\t if (typeof (returned != null ? returned.then : void 0) === \"function\") {\n\t return (await returned);\n\t } else {\n\t return returned;\n\t }\n\t } catch (error) {\n\t e = error;\n\t {\n\t this.trigger(\"error\", e);\n\t }\n\t return null;\n\t }\n\t });\n\t return ((await Promise.all(promises))).find(function(x) {\n\t return x != null;\n\t });\n\t } catch (error) {\n\t e = error;\n\t {\n\t this.trigger(\"error\", e);\n\t }\n\t return null;\n\t }\n\t }\n\n\t};\n\n\tvar Events_1 = Events;\n\n\tvar DLList$1, Events$1, Queues;\n\n\tDLList$1 = DLList_1;\n\n\tEvents$1 = Events_1;\n\n\tQueues = class Queues {\n\t constructor(num_priorities) {\n\t var i;\n\t this.Events = new Events$1(this);\n\t this._length = 0;\n\t this._lists = (function() {\n\t var j, ref, results;\n\t results = [];\n\t for (i = j = 1, ref = num_priorities; (1 <= ref ? j <= ref : j >= ref); i = 1 <= ref ? 
++j : --j) {\n\t results.push(new DLList$1((() => {\n\t return this.incr();\n\t }), (() => {\n\t return this.decr();\n\t })));\n\t }\n\t return results;\n\t }).call(this);\n\t }\n\n\t incr() {\n\t if (this._length++ === 0) {\n\t return this.Events.trigger(\"leftzero\");\n\t }\n\t }\n\n\t decr() {\n\t if (--this._length === 0) {\n\t return this.Events.trigger(\"zero\");\n\t }\n\t }\n\n\t push(job) {\n\t return this._lists[job.options.priority].push(job);\n\t }\n\n\t queued(priority) {\n\t if (priority != null) {\n\t return this._lists[priority].length;\n\t } else {\n\t return this._length;\n\t }\n\t }\n\n\t shiftAll(fn) {\n\t return this._lists.forEach(function(list) {\n\t return list.forEachShift(fn);\n\t });\n\t }\n\n\t getFirst(arr = this._lists) {\n\t var j, len, list;\n\t for (j = 0, len = arr.length; j < len; j++) {\n\t list = arr[j];\n\t if (list.length > 0) {\n\t return list;\n\t }\n\t }\n\t return [];\n\t }\n\n\t shiftLastFrom(priority) {\n\t return this.getFirst(this._lists.slice(priority).reverse()).shift();\n\t }\n\n\t};\n\n\tvar Queues_1 = Queues;\n\n\tvar BottleneckError;\n\n\tBottleneckError = class BottleneckError extends Error {};\n\n\tvar BottleneckError_1 = BottleneckError;\n\n\tvar BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1;\n\n\tNUM_PRIORITIES = 10;\n\n\tDEFAULT_PRIORITY = 5;\n\n\tparser$1 = parser;\n\n\tBottleneckError$1 = BottleneckError_1;\n\n\tJob = class Job {\n\t constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) {\n\t this.task = task;\n\t this.args = args;\n\t this.rejectOnDrop = rejectOnDrop;\n\t this.Events = Events;\n\t this._states = _states;\n\t this.Promise = Promise;\n\t this.options = parser$1.load(options, jobDefaults);\n\t this.options.priority = this._sanitizePriority(this.options.priority);\n\t if (this.options.id === jobDefaults.id) {\n\t this.options.id = `${this.options.id}-${this._randomIndex()}`;\n\t }\n\t this.promise = new this.Promise((_resolve, _reject) => {\n\t this._resolve = _resolve;\n\t this._reject = _reject;\n\t });\n\t this.retryCount = 0;\n\t }\n\n\t _sanitizePriority(priority) {\n\t var sProperty;\n\t sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority;\n\t if (sProperty < 0) {\n\t return 0;\n\t } else if (sProperty > NUM_PRIORITIES - 1) {\n\t return NUM_PRIORITIES - 1;\n\t } else {\n\t return sProperty;\n\t }\n\t }\n\n\t _randomIndex() {\n\t return Math.random().toString(36).slice(2);\n\t }\n\n\t doDrop({error, message = \"This job has been dropped by Bottleneck\"} = {}) {\n\t if (this._states.remove(this.options.id)) {\n\t if (this.rejectOnDrop) {\n\t this._reject(error != null ? error : new BottleneckError$1(message));\n\t }\n\t this.Events.trigger(\"dropped\", {args: this.args, options: this.options, task: this.task, promise: this.promise});\n\t return true;\n\t } else {\n\t return false;\n\t }\n\t }\n\n\t _assertStatus(expected) {\n\t var status;\n\t status = this._states.jobStatus(this.options.id);\n\t if (!(status === expected || (expected === \"DONE\" && status === null))) {\n\t throw new BottleneckError$1(`Invalid job status ${status}, expected ${expected}. 
Please open an issue at https://github.com/SGrondin/bottleneck/issues`);\n\t }\n\t }\n\n\t doReceive() {\n\t this._states.start(this.options.id);\n\t return this.Events.trigger(\"received\", {args: this.args, options: this.options});\n\t }\n\n\t doQueue(reachedHWM, blocked) {\n\t this._assertStatus(\"RECEIVED\");\n\t this._states.next(this.options.id);\n\t return this.Events.trigger(\"queued\", {args: this.args, options: this.options, reachedHWM, blocked});\n\t }\n\n\t doRun() {\n\t if (this.retryCount === 0) {\n\t this._assertStatus(\"QUEUED\");\n\t this._states.next(this.options.id);\n\t } else {\n\t this._assertStatus(\"EXECUTING\");\n\t }\n\t return this.Events.trigger(\"scheduled\", {args: this.args, options: this.options});\n\t }\n\n\t async doExecute(chained, clearGlobalState, run, free) {\n\t var error, eventInfo, passed;\n\t if (this.retryCount === 0) {\n\t this._assertStatus(\"RUNNING\");\n\t this._states.next(this.options.id);\n\t } else {\n\t this._assertStatus(\"EXECUTING\");\n\t }\n\t eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount};\n\t this.Events.trigger(\"executing\", eventInfo);\n\t try {\n\t passed = (await (chained != null ? chained.schedule(this.options, this.task, ...this.args) : this.task(...this.args)));\n\t if (clearGlobalState()) {\n\t this.doDone(eventInfo);\n\t await free(this.options, eventInfo);\n\t this._assertStatus(\"DONE\");\n\t return this._resolve(passed);\n\t }\n\t } catch (error1) {\n\t error = error1;\n\t return this._onFailure(error, eventInfo, clearGlobalState, run, free);\n\t }\n\t }\n\n\t doExpire(clearGlobalState, run, free) {\n\t var error, eventInfo;\n\t if (this._states.jobStatus(this.options.id === \"RUNNING\")) {\n\t this._states.next(this.options.id);\n\t }\n\t this._assertStatus(\"EXECUTING\");\n\t eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount};\n\t error = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`);\n\t return this._onFailure(error, eventInfo, clearGlobalState, run, free);\n\t }\n\n\t async _onFailure(error, eventInfo, clearGlobalState, run, free) {\n\t var retry, retryAfter;\n\t if (clearGlobalState()) {\n\t retry = (await this.Events.trigger(\"failed\", error, eventInfo));\n\t if (retry != null) {\n\t retryAfter = ~~retry;\n\t this.Events.trigger(\"retry\", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo);\n\t this.retryCount++;\n\t return run(retryAfter);\n\t } else {\n\t this.doDone(eventInfo);\n\t await free(this.options, eventInfo);\n\t this._assertStatus(\"DONE\");\n\t return this._reject(error);\n\t }\n\t }\n\t }\n\n\t doDone(eventInfo) {\n\t this._assertStatus(\"EXECUTING\");\n\t this._states.next(this.options.id);\n\t return this.Events.trigger(\"done\", eventInfo);\n\t }\n\n\t};\n\n\tvar Job_1 = Job;\n\n\tvar BottleneckError$2, LocalDatastore, parser$2;\n\n\tparser$2 = parser;\n\n\tBottleneckError$2 = BottleneckError_1;\n\n\tLocalDatastore = class LocalDatastore {\n\t constructor(instance, storeOptions, storeInstanceOptions) {\n\t this.instance = instance;\n\t this.storeOptions = storeOptions;\n\t this.clientId = this.instance._randomIndex();\n\t parser$2.load(storeInstanceOptions, storeInstanceOptions, this);\n\t this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now();\n\t this._running = 0;\n\t this._done = 0;\n\t this._unblockTime = 0;\n\t this.ready = this.Promise.resolve();\n\t this.clients = {};\n\t this._startHeartbeat();\n\t }\n\n\t _startHeartbeat() {\n\t var 
base;\n\t if ((this.heartbeat == null) && (((this.storeOptions.reservoirRefreshInterval != null) && (this.storeOptions.reservoirRefreshAmount != null)) || ((this.storeOptions.reservoirIncreaseInterval != null) && (this.storeOptions.reservoirIncreaseAmount != null)))) {\n\t return typeof (base = (this.heartbeat = setInterval(() => {\n\t var amount, incr, maximum, now, reservoir;\n\t now = Date.now();\n\t if ((this.storeOptions.reservoirRefreshInterval != null) && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) {\n\t this._lastReservoirRefresh = now;\n\t this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount;\n\t this.instance._drainAll(this.computeCapacity());\n\t }\n\t if ((this.storeOptions.reservoirIncreaseInterval != null) && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) {\n\t ({\n\t reservoirIncreaseAmount: amount,\n\t reservoirIncreaseMaximum: maximum,\n\t reservoir\n\t } = this.storeOptions);\n\t this._lastReservoirIncrease = now;\n\t incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount;\n\t if (incr > 0) {\n\t this.storeOptions.reservoir += incr;\n\t return this.instance._drainAll(this.computeCapacity());\n\t }\n\t }\n\t }, this.heartbeatInterval))).unref === \"function\" ? base.unref() : void 0;\n\t } else {\n\t return clearInterval(this.heartbeat);\n\t }\n\t }\n\n\t async __publish__(message) {\n\t await this.yieldLoop();\n\t return this.instance.Events.trigger(\"message\", message.toString());\n\t }\n\n\t async __disconnect__(flush) {\n\t await this.yieldLoop();\n\t clearInterval(this.heartbeat);\n\t return this.Promise.resolve();\n\t }\n\n\t yieldLoop(t = 0) {\n\t return new this.Promise(function(resolve, reject) {\n\t return setTimeout(resolve, t);\n\t });\n\t }\n\n\t computePenalty() {\n\t var ref;\n\t return (ref = this.storeOptions.penalty) != null ? 
ref : (15 * this.storeOptions.minTime) || 5000;\n\t }\n\n\t async __updateSettings__(options) {\n\t await this.yieldLoop();\n\t parser$2.overwrite(options, options, this.storeOptions);\n\t this._startHeartbeat();\n\t this.instance._drainAll(this.computeCapacity());\n\t return true;\n\t }\n\n\t async __running__() {\n\t await this.yieldLoop();\n\t return this._running;\n\t }\n\n\t async __queued__() {\n\t await this.yieldLoop();\n\t return this.instance.queued();\n\t }\n\n\t async __done__() {\n\t await this.yieldLoop();\n\t return this._done;\n\t }\n\n\t async __groupCheck__(time) {\n\t await this.yieldLoop();\n\t return (this._nextRequest + this.timeout) < time;\n\t }\n\n\t computeCapacity() {\n\t var maxConcurrent, reservoir;\n\t ({maxConcurrent, reservoir} = this.storeOptions);\n\t if ((maxConcurrent != null) && (reservoir != null)) {\n\t return Math.min(maxConcurrent - this._running, reservoir);\n\t } else if (maxConcurrent != null) {\n\t return maxConcurrent - this._running;\n\t } else if (reservoir != null) {\n\t return reservoir;\n\t } else {\n\t return null;\n\t }\n\t }\n\n\t conditionsCheck(weight) {\n\t var capacity;\n\t capacity = this.computeCapacity();\n\t return (capacity == null) || weight <= capacity;\n\t }\n\n\t async __incrementReservoir__(incr) {\n\t var reservoir;\n\t await this.yieldLoop();\n\t reservoir = this.storeOptions.reservoir += incr;\n\t this.instance._drainAll(this.computeCapacity());\n\t return reservoir;\n\t }\n\n\t async __currentReservoir__() {\n\t await this.yieldLoop();\n\t return this.storeOptions.reservoir;\n\t }\n\n\t isBlocked(now) {\n\t return this._unblockTime >= now;\n\t }\n\n\t check(weight, now) {\n\t return this.conditionsCheck(weight) && (this._nextRequest - now) <= 0;\n\t }\n\n\t async __check__(weight) {\n\t var now;\n\t await this.yieldLoop();\n\t now = Date.now();\n\t return this.check(weight, now);\n\t }\n\n\t async __register__(index, weight, expiration) {\n\t var now, wait;\n\t await this.yieldLoop();\n\t now = Date.now();\n\t if (this.conditionsCheck(weight)) {\n\t this._running += weight;\n\t if (this.storeOptions.reservoir != null) {\n\t this.storeOptions.reservoir -= weight;\n\t }\n\t wait = Math.max(this._nextRequest - now, 0);\n\t this._nextRequest = now + wait + this.storeOptions.minTime;\n\t return {\n\t success: true,\n\t wait,\n\t reservoir: this.storeOptions.reservoir\n\t };\n\t } else {\n\t return {\n\t success: false\n\t };\n\t }\n\t }\n\n\t strategyIsBlock() {\n\t return this.storeOptions.strategy === 3;\n\t }\n\n\t async __submit__(queueLength, weight) {\n\t var blocked, now, reachedHWM;\n\t await this.yieldLoop();\n\t if ((this.storeOptions.maxConcurrent != null) && weight > this.storeOptions.maxConcurrent) {\n\t throw new BottleneckError$2(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`);\n\t }\n\t now = Date.now();\n\t reachedHWM = (this.storeOptions.highWater != null) && queueLength === this.storeOptions.highWater && !this.check(weight, now);\n\t blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now));\n\t if (blocked) {\n\t this._unblockTime = now + this.computePenalty();\n\t this._nextRequest = this._unblockTime + this.storeOptions.minTime;\n\t this.instance._dropAllQueued();\n\t }\n\t return {\n\t reachedHWM,\n\t blocked,\n\t strategy: this.storeOptions.strategy\n\t };\n\t }\n\n\t async __free__(index, weight) {\n\t await this.yieldLoop();\n\t this._running -= weight;\n\t this._done += weight;\n\t 
this.instance._drainAll(this.computeCapacity());\n\t return {\n\t running: this._running\n\t };\n\t }\n\n\t};\n\n\tvar LocalDatastore_1 = LocalDatastore;\n\n\tvar BottleneckError$3, States;\n\n\tBottleneckError$3 = BottleneckError_1;\n\n\tStates = class States {\n\t constructor(status1) {\n\t this.status = status1;\n\t this._jobs = {};\n\t this.counts = this.status.map(function() {\n\t return 0;\n\t });\n\t }\n\n\t next(id) {\n\t var current, next;\n\t current = this._jobs[id];\n\t next = current + 1;\n\t if ((current != null) && next < this.status.length) {\n\t this.counts[current]--;\n\t this.counts[next]++;\n\t return this._jobs[id]++;\n\t } else if (current != null) {\n\t this.counts[current]--;\n\t return delete this._jobs[id];\n\t }\n\t }\n\n\t start(id) {\n\t var initial;\n\t initial = 0;\n\t this._jobs[id] = initial;\n\t return this.counts[initial]++;\n\t }\n\n\t remove(id) {\n\t var current;\n\t current = this._jobs[id];\n\t if (current != null) {\n\t this.counts[current]--;\n\t delete this._jobs[id];\n\t }\n\t return current != null;\n\t }\n\n\t jobStatus(id) {\n\t var ref;\n\t return (ref = this.status[this._jobs[id]]) != null ? ref : null;\n\t }\n\n\t statusJobs(status) {\n\t var k, pos, ref, results, v;\n\t if (status != null) {\n\t pos = this.status.indexOf(status);\n\t if (pos < 0) {\n\t throw new BottleneckError$3(`status must be one of ${this.status.join(', ')}`);\n\t }\n\t ref = this._jobs;\n\t results = [];\n\t for (k in ref) {\n\t v = ref[k];\n\t if (v === pos) {\n\t results.push(k);\n\t }\n\t }\n\t return results;\n\t } else {\n\t return Object.keys(this._jobs);\n\t }\n\t }\n\n\t statusCounts() {\n\t return this.counts.reduce(((acc, v, i) => {\n\t acc[this.status[i]] = v;\n\t return acc;\n\t }), {});\n\t }\n\n\t};\n\n\tvar States_1 = States;\n\n\tvar DLList$2, Sync;\n\n\tDLList$2 = DLList_1;\n\n\tSync = class Sync {\n\t constructor(name, Promise) {\n\t this.schedule = this.schedule.bind(this);\n\t this.name = name;\n\t this.Promise = Promise;\n\t this._running = 0;\n\t this._queue = new DLList$2();\n\t }\n\n\t isEmpty() {\n\t return this._queue.length === 0;\n\t }\n\n\t async _tryToRun() {\n\t var args, cb, error, reject, resolve, returned, task;\n\t if ((this._running < 1) && this._queue.length > 0) {\n\t this._running++;\n\t ({task, args, resolve, reject} = this._queue.shift());\n\t cb = (await (async function() {\n\t try {\n\t returned = (await task(...args));\n\t return function() {\n\t return resolve(returned);\n\t };\n\t } catch (error1) {\n\t error = error1;\n\t return function() {\n\t return reject(error);\n\t };\n\t }\n\t })());\n\t this._running--;\n\t this._tryToRun();\n\t return cb();\n\t }\n\t }\n\n\t schedule(task, ...args) {\n\t var promise, reject, resolve;\n\t resolve = reject = null;\n\t promise = new this.Promise(function(_resolve, _reject) {\n\t resolve = _resolve;\n\t return reject = _reject;\n\t });\n\t this._queue.push({task, args, resolve, reject});\n\t this._tryToRun();\n\t return promise;\n\t }\n\n\t};\n\n\tvar Sync_1 = Sync;\n\n\tvar version = \"2.19.5\";\n\tvar version$1 = {\n\t\tversion: version\n\t};\n\n\tvar version$2 = /*#__PURE__*/Object.freeze({\n\t\tversion: version,\n\t\tdefault: version$1\n\t});\n\n\tvar require$$2 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');\n\n\tvar require$$3 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');\n\n\tvar require$$4 = () => console.log('You must import the full version of Bottleneck in order to 
use this feature.');\n\n\tvar Events$2, Group, IORedisConnection$1, RedisConnection$1, Scripts$1, parser$3;\n\n\tparser$3 = parser;\n\n\tEvents$2 = Events_1;\n\n\tRedisConnection$1 = require$$2;\n\n\tIORedisConnection$1 = require$$3;\n\n\tScripts$1 = require$$4;\n\n\tGroup = (function() {\n\t class Group {\n\t constructor(limiterOptions = {}) {\n\t this.deleteKey = this.deleteKey.bind(this);\n\t this.limiterOptions = limiterOptions;\n\t parser$3.load(this.limiterOptions, this.defaults, this);\n\t this.Events = new Events$2(this);\n\t this.instances = {};\n\t this.Bottleneck = Bottleneck_1;\n\t this._startAutoCleanup();\n\t this.sharedConnection = this.connection != null;\n\t if (this.connection == null) {\n\t if (this.limiterOptions.datastore === \"redis\") {\n\t this.connection = new RedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events}));\n\t } else if (this.limiterOptions.datastore === \"ioredis\") {\n\t this.connection = new IORedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events}));\n\t }\n\t }\n\t }\n\n\t key(key = \"\") {\n\t var ref;\n\t return (ref = this.instances[key]) != null ? ref : (() => {\n\t var limiter;\n\t limiter = this.instances[key] = new this.Bottleneck(Object.assign(this.limiterOptions, {\n\t id: `${this.id}-${key}`,\n\t timeout: this.timeout,\n\t connection: this.connection\n\t }));\n\t this.Events.trigger(\"created\", limiter, key);\n\t return limiter;\n\t })();\n\t }\n\n\t async deleteKey(key = \"\") {\n\t var deleted, instance;\n\t instance = this.instances[key];\n\t if (this.connection) {\n\t deleted = (await this.connection.__runCommand__(['del', ...Scripts$1.allKeys(`${this.id}-${key}`)]));\n\t }\n\t if (instance != null) {\n\t delete this.instances[key];\n\t await instance.disconnect();\n\t }\n\t return (instance != null) || deleted > 0;\n\t }\n\n\t limiters() {\n\t var k, ref, results, v;\n\t ref = this.instances;\n\t results = [];\n\t for (k in ref) {\n\t v = ref[k];\n\t results.push({\n\t key: k,\n\t limiter: v\n\t });\n\t }\n\t return results;\n\t }\n\n\t keys() {\n\t return Object.keys(this.instances);\n\t }\n\n\t async clusterKeys() {\n\t var cursor, end, found, i, k, keys, len, next, start;\n\t if (this.connection == null) {\n\t return this.Promise.resolve(this.keys());\n\t }\n\t keys = [];\n\t cursor = null;\n\t start = `b_${this.id}-`.length;\n\t end = \"_settings\".length;\n\t while (cursor !== 0) {\n\t [next, found] = (await this.connection.__runCommand__([\"scan\", cursor != null ? cursor : 0, \"match\", `b_${this.id}-*_settings`, \"count\", 10000]));\n\t cursor = ~~next;\n\t for (i = 0, len = found.length; i < len; i++) {\n\t k = found[i];\n\t keys.push(k.slice(start, -end));\n\t }\n\t }\n\t return keys;\n\t }\n\n\t _startAutoCleanup() {\n\t var base;\n\t clearInterval(this.interval);\n\t return typeof (base = (this.interval = setInterval(async() => {\n\t var e, k, ref, results, time, v;\n\t time = Date.now();\n\t ref = this.instances;\n\t results = [];\n\t for (k in ref) {\n\t v = ref[k];\n\t try {\n\t if ((await v._store.__groupCheck__(time))) {\n\t results.push(this.deleteKey(k));\n\t } else {\n\t results.push(void 0);\n\t }\n\t } catch (error) {\n\t e = error;\n\t results.push(v.Events.trigger(\"error\", e));\n\t }\n\t }\n\t return results;\n\t }, this.timeout / 2))).unref === \"function\" ? 
base.unref() : void 0;\n\t }\n\n\t updateSettings(options = {}) {\n\t parser$3.overwrite(options, this.defaults, this);\n\t parser$3.overwrite(options, options, this.limiterOptions);\n\t if (options.timeout != null) {\n\t return this._startAutoCleanup();\n\t }\n\t }\n\n\t disconnect(flush = true) {\n\t var ref;\n\t if (!this.sharedConnection) {\n\t return (ref = this.connection) != null ? ref.disconnect(flush) : void 0;\n\t }\n\t }\n\n\t }\n\t Group.prototype.defaults = {\n\t timeout: 1000 * 60 * 5,\n\t connection: null,\n\t Promise: Promise,\n\t id: \"group-key\"\n\t };\n\n\t return Group;\n\n\t}).call(commonjsGlobal);\n\n\tvar Group_1 = Group;\n\n\tvar Batcher, Events$3, parser$4;\n\n\tparser$4 = parser;\n\n\tEvents$3 = Events_1;\n\n\tBatcher = (function() {\n\t class Batcher {\n\t constructor(options = {}) {\n\t this.options = options;\n\t parser$4.load(this.options, this.defaults, this);\n\t this.Events = new Events$3(this);\n\t this._arr = [];\n\t this._resetPromise();\n\t this._lastFlush = Date.now();\n\t }\n\n\t _resetPromise() {\n\t return this._promise = new this.Promise((res, rej) => {\n\t return this._resolve = res;\n\t });\n\t }\n\n\t _flush() {\n\t clearTimeout(this._timeout);\n\t this._lastFlush = Date.now();\n\t this._resolve();\n\t this.Events.trigger(\"batch\", this._arr);\n\t this._arr = [];\n\t return this._resetPromise();\n\t }\n\n\t add(data) {\n\t var ret;\n\t this._arr.push(data);\n\t ret = this._promise;\n\t if (this._arr.length === this.maxSize) {\n\t this._flush();\n\t } else if ((this.maxTime != null) && this._arr.length === 1) {\n\t this._timeout = setTimeout(() => {\n\t return this._flush();\n\t }, this.maxTime);\n\t }\n\t return ret;\n\t }\n\n\t }\n\t Batcher.prototype.defaults = {\n\t maxTime: null,\n\t maxSize: null,\n\t Promise: Promise\n\t };\n\n\t return Batcher;\n\n\t}).call(commonjsGlobal);\n\n\tvar Batcher_1 = Batcher;\n\n\tvar require$$4$1 = () => console.log('You must import the full version of Bottleneck in order to use this feature.');\n\n\tvar require$$8 = getCjsExportFromNamespace(version$2);\n\n\tvar Bottleneck, DEFAULT_PRIORITY$1, Events$4, Job$1, LocalDatastore$1, NUM_PRIORITIES$1, Queues$1, RedisDatastore$1, States$1, Sync$1, parser$5,\n\t splice = [].splice;\n\n\tNUM_PRIORITIES$1 = 10;\n\n\tDEFAULT_PRIORITY$1 = 5;\n\n\tparser$5 = parser;\n\n\tQueues$1 = Queues_1;\n\n\tJob$1 = Job_1;\n\n\tLocalDatastore$1 = LocalDatastore_1;\n\n\tRedisDatastore$1 = require$$4$1;\n\n\tEvents$4 = Events_1;\n\n\tStates$1 = States_1;\n\n\tSync$1 = Sync_1;\n\n\tBottleneck = (function() {\n\t class Bottleneck {\n\t constructor(options = {}, ...invalid) {\n\t var storeInstanceOptions, storeOptions;\n\t this._addToQueue = this._addToQueue.bind(this);\n\t this._validateOptions(options, invalid);\n\t parser$5.load(options, this.instanceDefaults, this);\n\t this._queues = new Queues$1(NUM_PRIORITIES$1);\n\t this._scheduled = {};\n\t this._states = new States$1([\"RECEIVED\", \"QUEUED\", \"RUNNING\", \"EXECUTING\"].concat(this.trackDoneStatus ? 
[\"DONE\"] : []));\n\t this._limiter = null;\n\t this.Events = new Events$4(this);\n\t this._submitLock = new Sync$1(\"submit\", this.Promise);\n\t this._registerLock = new Sync$1(\"register\", this.Promise);\n\t storeOptions = parser$5.load(options, this.storeDefaults, {});\n\t this._store = (function() {\n\t if (this.datastore === \"redis\" || this.datastore === \"ioredis\" || (this.connection != null)) {\n\t storeInstanceOptions = parser$5.load(options, this.redisStoreDefaults, {});\n\t return new RedisDatastore$1(this, storeOptions, storeInstanceOptions);\n\t } else if (this.datastore === \"local\") {\n\t storeInstanceOptions = parser$5.load(options, this.localStoreDefaults, {});\n\t return new LocalDatastore$1(this, storeOptions, storeInstanceOptions);\n\t } else {\n\t throw new Bottleneck.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`);\n\t }\n\t }).call(this);\n\t this._queues.on(\"leftzero\", () => {\n\t var ref;\n\t return (ref = this._store.heartbeat) != null ? typeof ref.ref === \"function\" ? ref.ref() : void 0 : void 0;\n\t });\n\t this._queues.on(\"zero\", () => {\n\t var ref;\n\t return (ref = this._store.heartbeat) != null ? typeof ref.unref === \"function\" ? ref.unref() : void 0 : void 0;\n\t });\n\t }\n\n\t _validateOptions(options, invalid) {\n\t if (!((options != null) && typeof options === \"object\" && invalid.length === 0)) {\n\t throw new Bottleneck.prototype.BottleneckError(\"Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1.\");\n\t }\n\t }\n\n\t ready() {\n\t return this._store.ready;\n\t }\n\n\t clients() {\n\t return this._store.clients;\n\t }\n\n\t channel() {\n\t return `b_${this.id}`;\n\t }\n\n\t channel_client() {\n\t return `b_${this.id}_${this._store.clientId}`;\n\t }\n\n\t publish(message) {\n\t return this._store.__publish__(message);\n\t }\n\n\t disconnect(flush = true) {\n\t return this._store.__disconnect__(flush);\n\t }\n\n\t chain(_limiter) {\n\t this._limiter = _limiter;\n\t return this;\n\t }\n\n\t queued(priority) {\n\t return this._queues.queued(priority);\n\t }\n\n\t clusterQueued() {\n\t return this._store.__queued__();\n\t }\n\n\t empty() {\n\t return this.queued() === 0 && this._submitLock.isEmpty();\n\t }\n\n\t running() {\n\t return this._store.__running__();\n\t }\n\n\t done() {\n\t return this._store.__done__();\n\t }\n\n\t jobStatus(id) {\n\t return this._states.jobStatus(id);\n\t }\n\n\t jobs(status) {\n\t return this._states.statusJobs(status);\n\t }\n\n\t counts() {\n\t return this._states.statusCounts();\n\t }\n\n\t _randomIndex() {\n\t return Math.random().toString(36).slice(2);\n\t }\n\n\t check(weight = 1) {\n\t return this._store.__check__(weight);\n\t }\n\n\t _clearGlobalState(index) {\n\t if (this._scheduled[index] != null) {\n\t clearTimeout(this._scheduled[index].expiration);\n\t delete this._scheduled[index];\n\t return true;\n\t } else {\n\t return false;\n\t }\n\t }\n\n\t async _free(index, job, options, eventInfo) {\n\t var e, running;\n\t try {\n\t ({running} = (await this._store.__free__(index, options.weight)));\n\t this.Events.trigger(\"debug\", `Freed ${options.id}`, eventInfo);\n\t if (running === 0 && this.empty()) {\n\t return this.Events.trigger(\"idle\");\n\t }\n\t } catch (error1) {\n\t e = error1;\n\t return this.Events.trigger(\"error\", e);\n\t }\n\t }\n\n\t _run(index, job, wait) {\n\t var clearGlobalState, free, run;\n\t job.doRun();\n\t clearGlobalState = 
this._clearGlobalState.bind(this, index);\n\t run = this._run.bind(this, index, job);\n\t free = this._free.bind(this, index, job);\n\t return this._scheduled[index] = {\n\t timeout: setTimeout(() => {\n\t return job.doExecute(this._limiter, clearGlobalState, run, free);\n\t }, wait),\n\t expiration: job.options.expiration != null ? setTimeout(function() {\n\t return job.doExpire(clearGlobalState, run, free);\n\t }, wait + job.options.expiration) : void 0,\n\t job: job\n\t };\n\t }\n\n\t _drainOne(capacity) {\n\t return this._registerLock.schedule(() => {\n\t var args, index, next, options, queue;\n\t if (this.queued() === 0) {\n\t return this.Promise.resolve(null);\n\t }\n\t queue = this._queues.getFirst();\n\t ({options, args} = next = queue.first());\n\t if ((capacity != null) && options.weight > capacity) {\n\t return this.Promise.resolve(null);\n\t }\n\t this.Events.trigger(\"debug\", `Draining ${options.id}`, {args, options});\n\t index = this._randomIndex();\n\t return this._store.__register__(index, options.weight, options.expiration).then(({success, wait, reservoir}) => {\n\t var empty;\n\t this.Events.trigger(\"debug\", `Drained ${options.id}`, {success, args, options});\n\t if (success) {\n\t queue.shift();\n\t empty = this.empty();\n\t if (empty) {\n\t this.Events.trigger(\"empty\");\n\t }\n\t if (reservoir === 0) {\n\t this.Events.trigger(\"depleted\", empty);\n\t }\n\t this._run(index, next, wait);\n\t return this.Promise.resolve(options.weight);\n\t } else {\n\t return this.Promise.resolve(null);\n\t }\n\t });\n\t });\n\t }\n\n\t _drainAll(capacity, total = 0) {\n\t return this._drainOne(capacity).then((drained) => {\n\t var newCapacity;\n\t if (drained != null) {\n\t newCapacity = capacity != null ? capacity - drained : capacity;\n\t return this._drainAll(newCapacity, total + drained);\n\t } else {\n\t return this.Promise.resolve(total);\n\t }\n\t }).catch((e) => {\n\t return this.Events.trigger(\"error\", e);\n\t });\n\t }\n\n\t _dropAllQueued(message) {\n\t return this._queues.shiftAll(function(job) {\n\t return job.doDrop({message});\n\t });\n\t }\n\n\t stop(options = {}) {\n\t var done, waitForExecuting;\n\t options = parser$5.load(options, this.stopDefaults);\n\t waitForExecuting = (at) => {\n\t var finished;\n\t finished = () => {\n\t var counts;\n\t counts = this._states.counts;\n\t return (counts[0] + counts[1] + counts[2] + counts[3]) === at;\n\t };\n\t return new this.Promise((resolve, reject) => {\n\t if (finished()) {\n\t return resolve();\n\t } else {\n\t return this.on(\"done\", () => {\n\t if (finished()) {\n\t this.removeAllListeners(\"done\");\n\t return resolve();\n\t }\n\t });\n\t }\n\t });\n\t };\n\t done = options.dropWaitingJobs ? 
(this._run = function(index, next) {\n\t return next.doDrop({\n\t message: options.dropErrorMessage\n\t });\n\t }, this._drainOne = () => {\n\t return this.Promise.resolve(null);\n\t }, this._registerLock.schedule(() => {\n\t return this._submitLock.schedule(() => {\n\t var k, ref, v;\n\t ref = this._scheduled;\n\t for (k in ref) {\n\t v = ref[k];\n\t if (this.jobStatus(v.job.options.id) === \"RUNNING\") {\n\t clearTimeout(v.timeout);\n\t clearTimeout(v.expiration);\n\t v.job.doDrop({\n\t message: options.dropErrorMessage\n\t });\n\t }\n\t }\n\t this._dropAllQueued(options.dropErrorMessage);\n\t return waitForExecuting(0);\n\t });\n\t })) : this.schedule({\n\t priority: NUM_PRIORITIES$1 - 1,\n\t weight: 0\n\t }, () => {\n\t return waitForExecuting(1);\n\t });\n\t this._receive = function(job) {\n\t return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage));\n\t };\n\t this.stop = () => {\n\t return this.Promise.reject(new Bottleneck.prototype.BottleneckError(\"stop() has already been called\"));\n\t };\n\t return done;\n\t }\n\n\t async _addToQueue(job) {\n\t var args, blocked, error, options, reachedHWM, shifted, strategy;\n\t ({args, options} = job);\n\t try {\n\t ({reachedHWM, blocked, strategy} = (await this._store.__submit__(this.queued(), options.weight)));\n\t } catch (error1) {\n\t error = error1;\n\t this.Events.trigger(\"debug\", `Could not queue ${options.id}`, {args, options, error});\n\t job.doDrop({error});\n\t return false;\n\t }\n\t if (blocked) {\n\t job.doDrop();\n\t return true;\n\t } else if (reachedHWM) {\n\t shifted = strategy === Bottleneck.prototype.strategy.LEAK ? this._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? this._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? job : void 0;\n\t if (shifted != null) {\n\t shifted.doDrop();\n\t }\n\t if ((shifted == null) || strategy === Bottleneck.prototype.strategy.OVERFLOW) {\n\t if (shifted == null) {\n\t job.doDrop();\n\t }\n\t return reachedHWM;\n\t }\n\t }\n\t job.doQueue(reachedHWM, blocked);\n\t this._queues.push(job);\n\t await this._drainAll();\n\t return reachedHWM;\n\t }\n\n\t _receive(job) {\n\t if (this._states.jobStatus(job.options.id) != null) {\n\t job._reject(new Bottleneck.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`));\n\t return false;\n\t } else {\n\t job.doReceive();\n\t return this._submitLock.schedule(this._addToQueue, job);\n\t }\n\t }\n\n\t submit(...args) {\n\t var cb, fn, job, options, ref, ref1, task;\n\t if (typeof args[0] === \"function\") {\n\t ref = args, [fn, ...args] = ref, [cb] = splice.call(args, -1);\n\t options = parser$5.load({}, this.jobDefaults);\n\t } else {\n\t ref1 = args, [options, fn, ...args] = ref1, [cb] = splice.call(args, -1);\n\t options = parser$5.load(options, this.jobDefaults);\n\t }\n\t task = (...args) => {\n\t return new this.Promise(function(resolve, reject) {\n\t return fn(...args, function(...args) {\n\t return (args[0] != null ? reject : resolve)(args);\n\t });\n\t });\n\t };\n\t job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);\n\t job.promise.then(function(args) {\n\t return typeof cb === \"function\" ? cb(...args) : void 0;\n\t }).catch(function(args) {\n\t if (Array.isArray(args)) {\n\t return typeof cb === \"function\" ? cb(...args) : void 0;\n\t } else {\n\t return typeof cb === \"function\" ? 
cb(args) : void 0;\n\t }\n\t });\n\t return this._receive(job);\n\t }\n\n\t schedule(...args) {\n\t var job, options, task;\n\t if (typeof args[0] === \"function\") {\n\t [task, ...args] = args;\n\t options = {};\n\t } else {\n\t [options, task, ...args] = args;\n\t }\n\t job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);\n\t this._receive(job);\n\t return job.promise;\n\t }\n\n\t wrap(fn) {\n\t var schedule, wrapped;\n\t schedule = this.schedule.bind(this);\n\t wrapped = function(...args) {\n\t return schedule(fn.bind(this), ...args);\n\t };\n\t wrapped.withOptions = function(options, ...args) {\n\t return schedule(options, fn, ...args);\n\t };\n\t return wrapped;\n\t }\n\n\t async updateSettings(options = {}) {\n\t await this._store.__updateSettings__(parser$5.overwrite(options, this.storeDefaults));\n\t parser$5.overwrite(options, this.instanceDefaults, this);\n\t return this;\n\t }\n\n\t currentReservoir() {\n\t return this._store.__currentReservoir__();\n\t }\n\n\t incrementReservoir(incr = 0) {\n\t return this._store.__incrementReservoir__(incr);\n\t }\n\n\t }\n\t Bottleneck.default = Bottleneck;\n\n\t Bottleneck.Events = Events$4;\n\n\t Bottleneck.version = Bottleneck.prototype.version = require$$8.version;\n\n\t Bottleneck.strategy = Bottleneck.prototype.strategy = {\n\t LEAK: 1,\n\t OVERFLOW: 2,\n\t OVERFLOW_PRIORITY: 4,\n\t BLOCK: 3\n\t };\n\n\t Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = BottleneckError_1;\n\n\t Bottleneck.Group = Bottleneck.prototype.Group = Group_1;\n\n\t Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = require$$2;\n\n\t Bottleneck.IORedisConnection = Bottleneck.prototype.IORedisConnection = require$$3;\n\n\t Bottleneck.Batcher = Bottleneck.prototype.Batcher = Batcher_1;\n\n\t Bottleneck.prototype.jobDefaults = {\n\t priority: DEFAULT_PRIORITY$1,\n\t weight: 1,\n\t expiration: null,\n\t id: \"\"\n\t };\n\n\t Bottleneck.prototype.storeDefaults = {\n\t maxConcurrent: null,\n\t minTime: 0,\n\t highWater: null,\n\t strategy: Bottleneck.prototype.strategy.LEAK,\n\t penalty: null,\n\t reservoir: null,\n\t reservoirRefreshInterval: null,\n\t reservoirRefreshAmount: null,\n\t reservoirIncreaseInterval: null,\n\t reservoirIncreaseAmount: null,\n\t reservoirIncreaseMaximum: null\n\t };\n\n\t Bottleneck.prototype.localStoreDefaults = {\n\t Promise: Promise,\n\t timeout: null,\n\t heartbeatInterval: 250\n\t };\n\n\t Bottleneck.prototype.redisStoreDefaults = {\n\t Promise: Promise,\n\t timeout: null,\n\t heartbeatInterval: 5000,\n\t clientTimeout: 10000,\n\t Redis: null,\n\t clientOptions: {},\n\t clusterNodes: null,\n\t clearDatastore: false,\n\t connection: null\n\t };\n\n\t Bottleneck.prototype.instanceDefaults = {\n\t datastore: \"local\",\n\t connection: null,\n\t id: \"\",\n\t rejectOnDrop: true,\n\t trackDoneStatus: false,\n\t Promise: Promise\n\t };\n\n\t Bottleneck.prototype.stopDefaults = {\n\t enqueueErrorMessage: \"This limiter has been stopped and cannot accept new jobs.\",\n\t dropWaitingJobs: true,\n\t dropErrorMessage: \"This limiter has been stopped.\"\n\t };\n\n\t return Bottleneck;\n\n\t}).call(commonjsGlobal);\n\n\tvar Bottleneck_1 = Bottleneck;\n\n\tvar lib = Bottleneck_1;\n\n\treturn lib;\n\n})));\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next 
*/\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc 
= Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = 
buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? 
null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) 
signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","\"use strict\";\n\nvar punycode = require(\"punycode\");\nvar mappingTable = require(\"./lib/mappingTable.json\");\n\nvar PROCESSING_OPTIONS = {\n TRANSITIONAL: 0,\n NONTRANSITIONAL: 1\n};\n\nfunction 
normalize(str) { // fix bug in v8\n return str.split('\\u0000').map(function (s) { return s.normalize('NFC'); }).join('\\u0000');\n}\n\nfunction findStatus(val) {\n var start = 0;\n var end = mappingTable.length - 1;\n\n while (start <= end) {\n var mid = Math.floor((start + end) / 2);\n\n var target = mappingTable[mid];\n if (target[0][0] <= val && target[0][1] >= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n 
return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? 
v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: 
true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) ? 
undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction 
httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.setErrorMap = exports.overrideErrorMap = exports.defaultErrorMap = exports.ZodError = exports.quotelessJson = exports.ZodIssueCode = void 0;\nconst util_1 = require(\"./helpers/util\");\nexports.ZodIssueCode = util_1.util.arrayToEnum([\n \"invalid_type\",\n \"invalid_literal\",\n \"custom\",\n \"invalid_union\",\n \"invalid_union_discriminator\",\n \"invalid_enum_value\",\n \"unrecognized_keys\",\n \"invalid_arguments\",\n \"invalid_return_type\",\n \"invalid_date\",\n \"invalid_string\",\n \"too_small\",\n \"too_big\",\n \"invalid_intersection_types\",\n \"not_multiple_of\",\n]);\nconst quotelessJson = (obj) => {\n const json = JSON.stringify(obj, null, 2);\n return json.replace(/\"([^\"]+)\":/g, \"$1:\");\n};\nexports.quotelessJson = quotelessJson;\nclass ZodError extends Error {\n constructor(issues) {\n super();\n this.issues = [];\n this.addIssue = (sub) => {\n 
this.issues = [...this.issues, sub];\n };\n this.addIssues = (subs = []) => {\n this.issues = [...this.issues, ...subs];\n };\n const actualProto = new.target.prototype;\n if (Object.setPrototypeOf) {\n // eslint-disable-next-line ban/ban\n Object.setPrototypeOf(this, actualProto);\n }\n else {\n this.__proto__ = actualProto;\n }\n this.name = \"ZodError\";\n this.issues = issues;\n }\n get errors() {\n return this.issues;\n }\n format(_mapper) {\n const mapper = _mapper ||\n function (issue) {\n return issue.message;\n };\n const fieldErrors = { _errors: [] };\n const processError = (error) => {\n for (const issue of error.issues) {\n if (issue.code === \"invalid_union\") {\n issue.unionErrors.map(processError);\n }\n else if (issue.code === \"invalid_return_type\") {\n processError(issue.returnTypeError);\n }\n else if (issue.code === \"invalid_arguments\") {\n processError(issue.argumentsError);\n }\n else if (issue.path.length === 0) {\n fieldErrors._errors.push(mapper(issue));\n }\n else {\n let curr = fieldErrors;\n let i = 0;\n while (i < issue.path.length) {\n const el = issue.path[i];\n const terminal = i === issue.path.length - 1;\n if (!terminal) {\n curr[el] = curr[el] || { _errors: [] };\n // if (typeof el === \"string\") {\n // curr[el] = curr[el] || { _errors: [] };\n // } else if (typeof el === \"number\") {\n // const errorArray: any = [];\n // errorArray._errors = [];\n // curr[el] = curr[el] || errorArray;\n // }\n }\n else {\n curr[el] = curr[el] || { _errors: [] };\n curr[el]._errors.push(mapper(issue));\n }\n curr = curr[el];\n i++;\n }\n }\n }\n };\n processError(this);\n return fieldErrors;\n }\n toString() {\n return this.message;\n }\n get message() {\n return JSON.stringify(this.issues, null, 2);\n }\n get isEmpty() {\n return this.issues.length === 0;\n }\n flatten(mapper = (issue) => issue.message) {\n const fieldErrors = {};\n const formErrors = [];\n for (const sub of this.issues) {\n if (sub.path.length > 0) {\n fieldErrors[sub.path[0]] = fieldErrors[sub.path[0]] || [];\n fieldErrors[sub.path[0]].push(mapper(sub));\n }\n else {\n formErrors.push(mapper(sub));\n }\n }\n return { formErrors, fieldErrors };\n }\n get formErrors() {\n return this.flatten();\n }\n}\nexports.ZodError = ZodError;\nZodError.create = (issues) => {\n const error = new ZodError(issues);\n return error;\n};\nconst defaultErrorMap = (issue, _ctx) => {\n let message;\n switch (issue.code) {\n case exports.ZodIssueCode.invalid_type:\n if (issue.received === util_1.ZodParsedType.undefined) {\n message = \"Required\";\n }\n else {\n message = `Expected ${issue.expected}, received ${issue.received}`;\n }\n break;\n case exports.ZodIssueCode.invalid_literal:\n message = `Invalid literal value, expected ${JSON.stringify(issue.expected)}`;\n break;\n case exports.ZodIssueCode.unrecognized_keys:\n message = `Unrecognized key(s) in object: ${util_1.util.joinValues(issue.keys, \", \")}`;\n break;\n case exports.ZodIssueCode.invalid_union:\n message = `Invalid input`;\n break;\n case exports.ZodIssueCode.invalid_union_discriminator:\n message = `Invalid discriminator value. Expected ${util_1.util.joinValues(issue.options)}`;\n break;\n case exports.ZodIssueCode.invalid_enum_value:\n message = `Invalid enum value. 
Expected ${util_1.util.joinValues(issue.options)}, received '${issue.received}'`;\n break;\n case exports.ZodIssueCode.invalid_arguments:\n message = `Invalid function arguments`;\n break;\n case exports.ZodIssueCode.invalid_return_type:\n message = `Invalid function return type`;\n break;\n case exports.ZodIssueCode.invalid_date:\n message = `Invalid date`;\n break;\n case exports.ZodIssueCode.invalid_string:\n if (issue.validation !== \"regex\")\n message = `Invalid ${issue.validation}`;\n else\n message = \"Invalid\";\n break;\n case exports.ZodIssueCode.too_small:\n if (issue.type === \"array\")\n message = `Array must contain ${issue.inclusive ? `at least` : `more than`} ${issue.minimum} element(s)`;\n else if (issue.type === \"string\")\n message = `String must contain ${issue.inclusive ? `at least` : `over`} ${issue.minimum} character(s)`;\n else if (issue.type === \"number\")\n message = `Number must be greater than ${issue.inclusive ? `or equal to ` : ``}${issue.minimum}`;\n else\n message = \"Invalid input\";\n break;\n case exports.ZodIssueCode.too_big:\n if (issue.type === \"array\")\n message = `Array must contain ${issue.inclusive ? `at most` : `less than`} ${issue.maximum} element(s)`;\n else if (issue.type === \"string\")\n message = `String must contain ${issue.inclusive ? `at most` : `under`} ${issue.maximum} character(s)`;\n else if (issue.type === \"number\")\n message = `Number must be less than ${issue.inclusive ? `or equal to ` : ``}${issue.maximum}`;\n else\n message = \"Invalid input\";\n break;\n case exports.ZodIssueCode.custom:\n message = `Invalid input`;\n break;\n case exports.ZodIssueCode.invalid_intersection_types:\n message = `Intersection results could not be merged`;\n break;\n case exports.ZodIssueCode.not_multiple_of:\n message = `Number must be a multiple of ${issue.multipleOf}`;\n break;\n default:\n message = _ctx.defaultError;\n util_1.util.assertNever(issue);\n }\n return { message };\n};\nexports.defaultErrorMap = defaultErrorMap;\nexports.overrideErrorMap = exports.defaultErrorMap;\nconst setErrorMap = (map) => {\n exports.overrideErrorMap = map;\n};\nexports.setErrorMap = setErrorMap;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ZodParsedType = exports.getParsedType = void 0;\n__exportStar(require(\"./helpers/parseUtil\"), exports);\n__exportStar(require(\"./helpers/typeAliases\"), exports);\nvar util_1 = require(\"./helpers/util\");\nObject.defineProperty(exports, \"getParsedType\", { enumerable: true, get: function () { return util_1.getParsedType; } });\nObject.defineProperty(exports, \"ZodParsedType\", { enumerable: true, get: function () { return util_1.ZodParsedType; } });\n__exportStar(require(\"./types\"), exports);\n__exportStar(require(\"./ZodError\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.errorUtil = void 0;\nvar errorUtil;\n(function (errorUtil) {\n errorUtil.errToObj = (message) => typeof message === \"string\" ? 
{ message } : message || {};\n errorUtil.toString = (message) => typeof message === \"string\" ? message : message === null || message === void 0 ? void 0 : message.message;\n})(errorUtil = exports.errorUtil || (exports.errorUtil = {}));\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isAsync = exports.isValid = exports.isDirty = exports.isAborted = exports.OK = exports.DIRTY = exports.INVALID = exports.ParseStatus = exports.addIssueToContext = exports.EMPTY_PATH = exports.makeIssue = void 0;\nconst ZodError_1 = require(\"../ZodError\");\nconst makeIssue = (params) => {\n const { data, path, errorMaps, issueData } = params;\n const fullPath = [...path, ...(issueData.path || [])];\n const fullIssue = {\n ...issueData,\n path: fullPath,\n };\n let errorMessage = \"\";\n const maps = errorMaps\n .filter((m) => !!m)\n .slice()\n .reverse();\n for (const map of maps) {\n errorMessage = map(fullIssue, { data, defaultError: errorMessage }).message;\n }\n return {\n ...issueData,\n path: fullPath,\n message: issueData.message || errorMessage,\n };\n};\nexports.makeIssue = makeIssue;\nexports.EMPTY_PATH = [];\nfunction addIssueToContext(ctx, issueData) {\n const issue = exports.makeIssue({\n issueData: issueData,\n data: ctx.data,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n ZodError_1.overrideErrorMap,\n ZodError_1.defaultErrorMap,\n ].filter((x) => !!x),\n });\n ctx.common.issues.push(issue);\n}\nexports.addIssueToContext = addIssueToContext;\nclass ParseStatus {\n constructor() {\n this.value = \"valid\";\n }\n dirty() {\n if (this.value === \"valid\")\n this.value = \"dirty\";\n }\n abort() {\n if (this.value !== \"aborted\")\n this.value = \"aborted\";\n }\n static mergeArray(status, results) {\n const arrayValue = [];\n for (const s of results) {\n if (s.status === \"aborted\")\n return exports.INVALID;\n if (s.status === \"dirty\")\n status.dirty();\n arrayValue.push(s.value);\n }\n return { status: status.value, value: arrayValue };\n }\n static async mergeObjectAsync(status, pairs) {\n const syncPairs = [];\n for (const pair of pairs) {\n syncPairs.push({\n key: await pair.key,\n value: await pair.value,\n });\n }\n return ParseStatus.mergeObjectSync(status, syncPairs);\n }\n static mergeObjectSync(status, pairs) {\n const finalObject = {};\n for (const pair of pairs) {\n const { key, value } = pair;\n if (key.status === \"aborted\")\n return exports.INVALID;\n if (value.status === \"aborted\")\n return exports.INVALID;\n if (key.status === \"dirty\")\n status.dirty();\n if (value.status === \"dirty\")\n status.dirty();\n if (typeof value.value !== \"undefined\" || pair.alwaysSet) {\n finalObject[key.value] = value.value;\n }\n }\n return { status: status.value, value: finalObject };\n }\n}\nexports.ParseStatus = ParseStatus;\nexports.INVALID = Object.freeze({\n status: \"aborted\",\n});\nconst DIRTY = (value) => ({ status: \"dirty\", value });\nexports.DIRTY = DIRTY;\nconst OK = (value) => ({ status: \"valid\", value });\nexports.OK = OK;\nconst isAborted = (x) => x.status === \"aborted\";\nexports.isAborted = isAborted;\nconst isDirty = (x) => x.status === \"dirty\";\nexports.isDirty = isDirty;\nconst isValid = (x) => x.status === \"valid\";\nexports.isValid = isValid;\nconst isAsync = (x) => typeof Promise !== undefined && x instanceof Promise;\nexports.isAsync = isAsync;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n","\"use 
strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getParsedType = exports.ZodParsedType = exports.util = void 0;\nvar util;\n(function (util) {\n function assertNever(_x) {\n throw new Error();\n }\n util.assertNever = assertNever;\n util.arrayToEnum = (items) => {\n const obj = {};\n for (const item of items) {\n obj[item] = item;\n }\n return obj;\n };\n util.getValidEnumValues = (obj) => {\n const validKeys = util.objectKeys(obj).filter((k) => typeof obj[obj[k]] !== \"number\");\n const filtered = {};\n for (const k of validKeys) {\n filtered[k] = obj[k];\n }\n return util.objectValues(filtered);\n };\n util.objectValues = (obj) => {\n return util.objectKeys(obj).map(function (e) {\n return obj[e];\n });\n };\n util.objectKeys = typeof Object.keys === \"function\" // eslint-disable-line ban/ban\n ? (obj) => Object.keys(obj) // eslint-disable-line ban/ban\n : (object) => {\n const keys = [];\n for (const key in object) {\n if (Object.prototype.hasOwnProperty.call(object, key)) {\n keys.push(key);\n }\n }\n return keys;\n };\n util.find = (arr, checker) => {\n for (const item of arr) {\n if (checker(item))\n return item;\n }\n return undefined;\n };\n util.isInteger = typeof Number.isInteger === \"function\"\n ? (val) => Number.isInteger(val) // eslint-disable-line ban/ban\n : (val) => typeof val === \"number\" && isFinite(val) && Math.floor(val) === val;\n function joinValues(array, separator = \" | \") {\n return array\n .map((val) => (typeof val === \"string\" ? `'${val}'` : val))\n .join(separator);\n }\n util.joinValues = joinValues;\n})(util = exports.util || (exports.util = {}));\nexports.ZodParsedType = util.arrayToEnum([\n \"string\",\n \"nan\",\n \"number\",\n \"integer\",\n \"float\",\n \"boolean\",\n \"date\",\n \"bigint\",\n \"symbol\",\n \"function\",\n \"undefined\",\n \"null\",\n \"array\",\n \"object\",\n \"unknown\",\n \"promise\",\n \"void\",\n \"never\",\n \"map\",\n \"set\",\n]);\nconst getParsedType = (data) => {\n const t = typeof data;\n switch (t) {\n case \"undefined\":\n return exports.ZodParsedType.undefined;\n case \"string\":\n return exports.ZodParsedType.string;\n case \"number\":\n return isNaN(data) ? exports.ZodParsedType.nan : exports.ZodParsedType.number;\n case \"boolean\":\n return exports.ZodParsedType.boolean;\n case \"function\":\n return exports.ZodParsedType.function;\n case \"bigint\":\n return exports.ZodParsedType.bigint;\n case \"object\":\n if (Array.isArray(data)) {\n return exports.ZodParsedType.array;\n }\n if (data === null) {\n return exports.ZodParsedType.null;\n }\n if (data.then &&\n typeof data.then === \"function\" &&\n data.catch &&\n typeof data.catch === \"function\") {\n return exports.ZodParsedType.promise;\n }\n if (typeof Map !== \"undefined\" && data instanceof Map) {\n return exports.ZodParsedType.map;\n }\n if (typeof Set !== \"undefined\" && data instanceof Set) {\n return exports.ZodParsedType.set;\n }\n if (typeof Date !== \"undefined\" && data instanceof Date) {\n return exports.ZodParsedType.date;\n }\n return exports.ZodParsedType.object;\n default:\n return exports.ZodParsedType.unknown;\n }\n};\nexports.getParsedType = getParsedType;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.z = void 0;\nconst mod = __importStar(require(\"./external\"));\nexports.z = mod;\n__exportStar(require(\"./external\"), exports);\nexports.default = mod;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.intersection = exports.instanceof = exports.function = exports.enum = exports.effect = exports.discriminatedUnion = exports.date = exports.boolean = exports.bigint = exports.array = exports.any = exports.ZodFirstPartyTypeKind = exports.late = exports.ZodSchema = exports.Schema = exports.custom = exports.ZodNaN = exports.ZodDefault = exports.ZodNullable = exports.ZodOptional = exports.ZodTransformer = exports.ZodEffects = exports.ZodPromise = exports.ZodNativeEnum = exports.ZodEnum = exports.ZodLiteral = exports.ZodLazy = exports.ZodFunction = exports.ZodSet = exports.ZodMap = exports.ZodRecord = exports.ZodTuple = exports.ZodIntersection = exports.ZodDiscriminatedUnion = exports.ZodUnion = exports.ZodObject = exports.objectUtil = exports.ZodArray = exports.ZodVoid = exports.ZodNever = exports.ZodUnknown = exports.ZodAny = exports.ZodNull = exports.ZodUndefined = exports.ZodDate = exports.ZodBoolean = exports.ZodBigInt = exports.ZodNumber = exports.ZodString = exports.ZodType = void 0;\nexports.void = exports.unknown = exports.union = exports.undefined = exports.tuple = exports.transformer = exports.string = exports.strictObject = exports.set = exports.record = exports.promise = exports.preprocess = exports.ostring = exports.optional = exports.onumber = exports.oboolean = exports.object = exports.number = exports.nullable = exports.null = exports.never = exports.nativeEnum = exports.nan = exports.map = exports.literal = exports.lazy = void 0;\nconst errorUtil_1 = require(\"./helpers/errorUtil\");\nconst parseUtil_1 = require(\"./helpers/parseUtil\");\nconst util_1 = require(\"./helpers/util\");\nconst ZodError_1 = require(\"./ZodError\");\nclass ParseInputLazyPath {\n constructor(parent, value, path, key) {\n this.parent = parent;\n this.data = value;\n this._path = path;\n this._key = key;\n }\n get path() {\n return this._path.concat(this._key);\n }\n}\nconst handleResult = (ctx, result) => {\n if (parseUtil_1.isValid(result)) {\n return { success: true, data: result.value };\n }\n else {\n if (!ctx.common.issues.length) {\n throw new Error(\"Validation failed but no issues detected.\");\n }\n const error = new ZodError_1.ZodError(ctx.common.issues);\n return { success: false, error };\n }\n};\nfunction 
processCreateParams(params) {\n if (!params)\n return {};\n const { errorMap, invalid_type_error, required_error, description } = params;\n if (errorMap && (invalid_type_error || required_error)) {\n throw new Error(`Can't use \"invalid\" or \"required\" in conjunction with custom error map.`);\n }\n if (errorMap)\n return { errorMap: errorMap, description };\n const customMap = (iss, ctx) => {\n if (iss.code !== \"invalid_type\")\n return { message: ctx.defaultError };\n if (typeof ctx.data === \"undefined\" && required_error)\n return { message: required_error };\n if (params.invalid_type_error)\n return { message: params.invalid_type_error };\n return { message: ctx.defaultError };\n };\n return { errorMap: customMap, description };\n}\nclass ZodType {\n constructor(def) {\n /** Alias of safeParseAsync */\n this.spa = this.safeParseAsync;\n this.superRefine = this._refinement;\n this._def = def;\n this.parse = this.parse.bind(this);\n this.safeParse = this.safeParse.bind(this);\n this.parseAsync = this.parseAsync.bind(this);\n this.safeParseAsync = this.safeParseAsync.bind(this);\n this.spa = this.spa.bind(this);\n this.refine = this.refine.bind(this);\n this.refinement = this.refinement.bind(this);\n this.superRefine = this.superRefine.bind(this);\n this.optional = this.optional.bind(this);\n this.nullable = this.nullable.bind(this);\n this.nullish = this.nullish.bind(this);\n this.array = this.array.bind(this);\n this.promise = this.promise.bind(this);\n this.or = this.or.bind(this);\n this.and = this.and.bind(this);\n this.transform = this.transform.bind(this);\n this.default = this.default.bind(this);\n this.describe = this.describe.bind(this);\n this.isNullable = this.isNullable.bind(this);\n this.isOptional = this.isOptional.bind(this);\n }\n get description() {\n return this._def.description;\n }\n _getType(input) {\n return util_1.getParsedType(input.data);\n }\n _getOrReturnCtx(input, ctx) {\n return (ctx || {\n common: input.parent.common,\n data: input.data,\n parsedType: util_1.getParsedType(input.data),\n schemaErrorMap: this._def.errorMap,\n path: input.path,\n parent: input.parent,\n });\n }\n _processInputParams(input) {\n return {\n status: new parseUtil_1.ParseStatus(),\n ctx: {\n common: input.parent.common,\n data: input.data,\n parsedType: util_1.getParsedType(input.data),\n schemaErrorMap: this._def.errorMap,\n path: input.path,\n parent: input.parent,\n },\n };\n }\n _parseSync(input) {\n const result = this._parse(input);\n if (parseUtil_1.isAsync(result)) {\n throw new Error(\"Synchronous parse encountered promise.\");\n }\n return result;\n }\n _parseAsync(input) {\n const result = this._parse(input);\n return Promise.resolve(result);\n }\n parse(data, params) {\n const result = this.safeParse(data, params);\n if (result.success)\n return result.data;\n throw result.error;\n }\n safeParse(data, params) {\n var _a;\n const ctx = {\n common: {\n issues: [],\n async: (_a = params === null || params === void 0 ? void 0 : params.async) !== null && _a !== void 0 ? _a : false,\n contextualErrorMap: params === null || params === void 0 ? void 0 : params.errorMap,\n },\n path: (params === null || params === void 0 ? 
void 0 : params.path) || [],\n schemaErrorMap: this._def.errorMap,\n parent: null,\n data,\n parsedType: util_1.getParsedType(data),\n };\n const result = this._parseSync({ data, path: ctx.path, parent: ctx });\n return handleResult(ctx, result);\n }\n async parseAsync(data, params) {\n const result = await this.safeParseAsync(data, params);\n if (result.success)\n return result.data;\n throw result.error;\n }\n async safeParseAsync(data, params) {\n const ctx = {\n common: {\n issues: [],\n contextualErrorMap: params === null || params === void 0 ? void 0 : params.errorMap,\n async: true,\n },\n path: (params === null || params === void 0 ? void 0 : params.path) || [],\n schemaErrorMap: this._def.errorMap,\n parent: null,\n data,\n parsedType: util_1.getParsedType(data),\n };\n const maybeAsyncResult = this._parse({ data, path: [], parent: ctx });\n const result = await (parseUtil_1.isAsync(maybeAsyncResult)\n ? maybeAsyncResult\n : Promise.resolve(maybeAsyncResult));\n return handleResult(ctx, result);\n }\n refine(check, message) {\n const getIssueProperties = (val) => {\n if (typeof message === \"string\" || typeof message === \"undefined\") {\n return { message };\n }\n else if (typeof message === \"function\") {\n return message(val);\n }\n else {\n return message;\n }\n };\n return this._refinement((val, ctx) => {\n const result = check(val);\n const setError = () => ctx.addIssue({\n code: ZodError_1.ZodIssueCode.custom,\n ...getIssueProperties(val),\n });\n if (typeof Promise !== \"undefined\" && result instanceof Promise) {\n return result.then((data) => {\n if (!data) {\n setError();\n return false;\n }\n else {\n return true;\n }\n });\n }\n if (!result) {\n setError();\n return false;\n }\n else {\n return true;\n }\n });\n }\n refinement(check, refinementData) {\n return this._refinement((val, ctx) => {\n if (!check(val)) {\n ctx.addIssue(typeof refinementData === \"function\"\n ? refinementData(val, ctx)\n : refinementData);\n return false;\n }\n else {\n return true;\n }\n });\n }\n _refinement(refinement) {\n return new ZodEffects({\n schema: this,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect: { type: \"refinement\", refinement },\n });\n }\n optional() {\n return ZodOptional.create(this);\n }\n nullable() {\n return ZodNullable.create(this);\n }\n nullish() {\n return this.optional().nullable();\n }\n array() {\n return ZodArray.create(this);\n }\n promise() {\n return ZodPromise.create(this);\n }\n or(option) {\n return ZodUnion.create([this, option]);\n }\n and(incoming) {\n return ZodIntersection.create(this, incoming);\n }\n transform(transform) {\n return new ZodEffects({\n schema: this,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect: { type: \"transform\", transform },\n });\n }\n default(def) {\n const defaultValueFunc = typeof def === \"function\" ? 
def : () => def;\n return new ZodDefault({\n innerType: this,\n defaultValue: defaultValueFunc,\n typeName: ZodFirstPartyTypeKind.ZodDefault,\n });\n }\n describe(description) {\n const This = this.constructor;\n return new This({\n ...this._def,\n description,\n });\n }\n isOptional() {\n return this.safeParse(undefined).success;\n }\n isNullable() {\n return this.safeParse(null).success;\n }\n}\nexports.ZodType = ZodType;\nexports.Schema = ZodType;\nexports.ZodSchema = ZodType;\nconst cuidRegex = /^c[^\\s-]{8,}$/i;\nconst uuidRegex = /^([a-f0-9]{8}-[a-f0-9]{4}-[1-5][a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12}|00000000-0000-0000-0000-000000000000)$/i;\n// from https://stackoverflow.com/a/46181/1550155\n// old version: too slow, didn't support unicode\n// const emailRegex = /^((([a-z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])+(\\.([a-z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])|(\\\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-z]|\\d|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])|(([a-z]|\\d|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])([a-z]|\\d|-|\\.|_|~|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])*([a-z]|\\d|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])))\\.)+(([a-z]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])|(([a-z]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])([a-z]|\\d|-|\\.|_|~|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])*([a-z]|[\\u00A0-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFEF])))$/i;\n// eslint-disable-next-line\nconst emailRegex = /^(([^<>()[\\]\\.,;:\\s@\\\"]+(\\.[^<>()[\\]\\.,;:\\s@\\\"]+)*)|(\\\".+\\\"))@(([^<>()[\\]\\.,;:\\s@\\\"]+\\.)+[^<>()[\\]\\.,;:\\s@\\\"]{2,})$/i;\nclass ZodString extends ZodType {\n constructor() {\n super(...arguments);\n this._regex = (regex, validation, message) => this.refinement((data) => regex.test(data), {\n validation,\n code: ZodError_1.ZodIssueCode.invalid_string,\n ...errorUtil_1.errorUtil.errToObj(message),\n });\n /**\n * @deprecated Use z.string().min(1) instead.\n * @see {@link ZodString.min}\n */\n this.nonempty = (message) => this.min(1, errorUtil_1.errorUtil.errToObj(message));\n this.trim = () => new ZodString({\n ...this._def,\n checks: [...this._def.checks, { kind: \"trim\" }],\n });\n }\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.string) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.string,\n received: ctx.parsedType,\n }\n //\n );\n return parseUtil_1.INVALID;\n }\n const status = new parseUtil_1.ParseStatus();\n let ctx = undefined;\n for (const check of this._def.checks) {\n if (check.kind === \"min\") {\n if (input.data.length < check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: check.value,\n type: \"string\",\n inclusive: true,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"max\") {\n if (input.data.length > check.value) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: 
ZodError_1.ZodIssueCode.too_big,\n maximum: check.value,\n type: \"string\",\n inclusive: true,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"email\") {\n if (!emailRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"email\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"uuid\") {\n if (!uuidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"uuid\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"cuid\") {\n if (!cuidRegex.test(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"cuid\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"url\") {\n try {\n new URL(input.data);\n }\n catch (_a) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"url\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"regex\") {\n check.regex.lastIndex = 0;\n const testResult = check.regex.test(input.data);\n if (!testResult) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n validation: \"regex\",\n code: ZodError_1.ZodIssueCode.invalid_string,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"trim\") {\n input.data = input.data.trim();\n }\n else {\n util_1.util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n _addCheck(check) {\n return new ZodString({\n ...this._def,\n checks: [...this._def.checks, check],\n });\n }\n email(message) {\n return this._addCheck({ kind: \"email\", ...errorUtil_1.errorUtil.errToObj(message) });\n }\n url(message) {\n return this._addCheck({ kind: \"url\", ...errorUtil_1.errorUtil.errToObj(message) });\n }\n uuid(message) {\n return this._addCheck({ kind: \"uuid\", ...errorUtil_1.errorUtil.errToObj(message) });\n }\n cuid(message) {\n return this._addCheck({ kind: \"cuid\", ...errorUtil_1.errorUtil.errToObj(message) });\n }\n regex(regex, message) {\n return this._addCheck({\n kind: \"regex\",\n regex: regex,\n ...errorUtil_1.errorUtil.errToObj(message),\n });\n }\n min(minLength, message) {\n return this._addCheck({\n kind: \"min\",\n value: minLength,\n ...errorUtil_1.errorUtil.errToObj(message),\n });\n }\n max(maxLength, message) {\n return this._addCheck({\n kind: \"max\",\n value: maxLength,\n ...errorUtil_1.errorUtil.errToObj(message),\n });\n }\n length(len, message) {\n return this.min(len, message).max(len, message);\n }\n get isEmail() {\n return !!this._def.checks.find((ch) => ch.kind === \"email\");\n }\n get isURL() {\n return !!this._def.checks.find((ch) => ch.kind === \"url\");\n }\n get isUUID() {\n return !!this._def.checks.find((ch) => ch.kind === \"uuid\");\n }\n get isCUID() {\n return !!this._def.checks.find((ch) => ch.kind === \"cuid\");\n }\n get minLength() {\n let min = -Infinity;\n this._def.checks.map((ch) => {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min) {\n min = ch.value;\n }\n }\n });\n return min;\n }\n get maxLength() {\n let max = null;\n this._def.checks.map((ch) => {\n if (ch.kind === \"max\") 
{\n if (max === null || ch.value < max) {\n max = ch.value;\n }\n }\n });\n return max;\n }\n}\nexports.ZodString = ZodString;\nZodString.create = (params) => {\n return new ZodString({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodString,\n ...processCreateParams(params),\n });\n};\n// https://stackoverflow.com/questions/3966484/why-does-modulus-operator-return-fractional-number-in-javascript/31711034#31711034\nfunction floatSafeRemainder(val, step) {\n const valDecCount = (val.toString().split(\".\")[1] || \"\").length;\n const stepDecCount = (step.toString().split(\".\")[1] || \"\").length;\n const decCount = valDecCount > stepDecCount ? valDecCount : stepDecCount;\n const valInt = parseInt(val.toFixed(decCount).replace(\".\", \"\"));\n const stepInt = parseInt(step.toFixed(decCount).replace(\".\", \"\"));\n return (valInt % stepInt) / Math.pow(10, decCount);\n}\nclass ZodNumber extends ZodType {\n constructor() {\n super(...arguments);\n this.min = this.gte;\n this.max = this.lte;\n this.step = this.multipleOf;\n }\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.number) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.number,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n let ctx = undefined;\n const status = new parseUtil_1.ParseStatus();\n for (const check of this._def.checks) {\n if (check.kind === \"int\") {\n if (!util_1.util.isInteger(input.data)) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: \"integer\",\n received: \"float\",\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"min\") {\n const tooSmall = check.inclusive\n ? input.data < check.value\n : input.data <= check.value;\n if (tooSmall) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: check.value,\n type: \"number\",\n inclusive: check.inclusive,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"max\") {\n const tooBig = check.inclusive\n ? 
input.data > check.value\n : input.data >= check.value;\n if (tooBig) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_big,\n maximum: check.value,\n type: \"number\",\n inclusive: check.inclusive,\n message: check.message,\n });\n status.dirty();\n }\n }\n else if (check.kind === \"multipleOf\") {\n if (floatSafeRemainder(input.data, check.value) !== 0) {\n ctx = this._getOrReturnCtx(input, ctx);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.not_multiple_of,\n multipleOf: check.value,\n message: check.message,\n });\n status.dirty();\n }\n }\n else {\n util_1.util.assertNever(check);\n }\n }\n return { status: status.value, value: input.data };\n }\n gte(value, message) {\n return this.setLimit(\"min\", value, true, errorUtil_1.errorUtil.toString(message));\n }\n gt(value, message) {\n return this.setLimit(\"min\", value, false, errorUtil_1.errorUtil.toString(message));\n }\n lte(value, message) {\n return this.setLimit(\"max\", value, true, errorUtil_1.errorUtil.toString(message));\n }\n lt(value, message) {\n return this.setLimit(\"max\", value, false, errorUtil_1.errorUtil.toString(message));\n }\n setLimit(kind, value, inclusive, message) {\n return new ZodNumber({\n ...this._def,\n checks: [\n ...this._def.checks,\n {\n kind,\n value,\n inclusive,\n message: errorUtil_1.errorUtil.toString(message),\n },\n ],\n });\n }\n _addCheck(check) {\n return new ZodNumber({\n ...this._def,\n checks: [...this._def.checks, check],\n });\n }\n int(message) {\n return this._addCheck({\n kind: \"int\",\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n positive(message) {\n return this._addCheck({\n kind: \"min\",\n value: 0,\n inclusive: false,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n negative(message) {\n return this._addCheck({\n kind: \"max\",\n value: 0,\n inclusive: false,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n nonpositive(message) {\n return this._addCheck({\n kind: \"max\",\n value: 0,\n inclusive: true,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n nonnegative(message) {\n return this._addCheck({\n kind: \"min\",\n value: 0,\n inclusive: true,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n multipleOf(value, message) {\n return this._addCheck({\n kind: \"multipleOf\",\n value: value,\n message: errorUtil_1.errorUtil.toString(message),\n });\n }\n get minValue() {\n let min = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"min\") {\n if (min === null || ch.value > min)\n min = ch.value;\n }\n }\n return min;\n }\n get maxValue() {\n let max = null;\n for (const ch of this._def.checks) {\n if (ch.kind === \"max\") {\n if (max === null || ch.value < max)\n max = ch.value;\n }\n }\n return max;\n }\n get isInt() {\n return !!this._def.checks.find((ch) => ch.kind === \"int\");\n }\n}\nexports.ZodNumber = ZodNumber;\nZodNumber.create = (params) => {\n return new ZodNumber({\n checks: [],\n typeName: ZodFirstPartyTypeKind.ZodNumber,\n ...processCreateParams(params),\n });\n};\nclass ZodBigInt extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.bigint) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.bigint,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n 
}\n}\nexports.ZodBigInt = ZodBigInt;\nZodBigInt.create = (params) => {\n return new ZodBigInt({\n typeName: ZodFirstPartyTypeKind.ZodBigInt,\n ...processCreateParams(params),\n });\n};\nclass ZodBoolean extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.boolean) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.boolean,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodBoolean = ZodBoolean;\nZodBoolean.create = (params) => {\n return new ZodBoolean({\n typeName: ZodFirstPartyTypeKind.ZodBoolean,\n ...processCreateParams(params),\n });\n};\nclass ZodDate extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.date) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.date,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n if (isNaN(input.data.getTime())) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_date,\n });\n return parseUtil_1.INVALID;\n }\n return {\n status: \"valid\",\n value: new Date(input.data.getTime()),\n };\n }\n}\nexports.ZodDate = ZodDate;\nZodDate.create = (params) => {\n return new ZodDate({\n typeName: ZodFirstPartyTypeKind.ZodDate,\n ...processCreateParams(params),\n });\n};\nclass ZodUndefined extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.undefined) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.undefined,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodUndefined = ZodUndefined;\nZodUndefined.create = (params) => {\n return new ZodUndefined({\n typeName: ZodFirstPartyTypeKind.ZodUndefined,\n ...processCreateParams(params),\n });\n};\nclass ZodNull extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.null) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.null,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodNull = ZodNull;\nZodNull.create = (params) => {\n return new ZodNull({\n typeName: ZodFirstPartyTypeKind.ZodNull,\n ...processCreateParams(params),\n });\n};\nclass ZodAny extends ZodType {\n constructor() {\n super(...arguments);\n // to prevent instances of other classes from extending ZodAny. 
this causes issues with catchall in ZodObject.\n this._any = true;\n }\n _parse(input) {\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodAny = ZodAny;\nZodAny.create = (params) => {\n return new ZodAny({\n typeName: ZodFirstPartyTypeKind.ZodAny,\n ...processCreateParams(params),\n });\n};\nclass ZodUnknown extends ZodType {\n constructor() {\n super(...arguments);\n // required\n this._unknown = true;\n }\n _parse(input) {\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodUnknown = ZodUnknown;\nZodUnknown.create = (params) => {\n return new ZodUnknown({\n typeName: ZodFirstPartyTypeKind.ZodUnknown,\n ...processCreateParams(params),\n });\n};\nclass ZodNever extends ZodType {\n _parse(input) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.never,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n}\nexports.ZodNever = ZodNever;\nZodNever.create = (params) => {\n return new ZodNever({\n typeName: ZodFirstPartyTypeKind.ZodNever,\n ...processCreateParams(params),\n });\n};\nclass ZodVoid extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.undefined) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.void,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n}\nexports.ZodVoid = ZodVoid;\nZodVoid.create = (params) => {\n return new ZodVoid({\n typeName: ZodFirstPartyTypeKind.ZodVoid,\n ...processCreateParams(params),\n });\n};\nclass ZodArray extends ZodType {\n _parse(input) {\n const { ctx, status } = this._processInputParams(input);\n const def = this._def;\n if (ctx.parsedType !== util_1.ZodParsedType.array) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.array,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n if (def.minLength !== null) {\n if (ctx.data.length < def.minLength.value) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: def.minLength.value,\n type: \"array\",\n inclusive: true,\n message: def.minLength.message,\n });\n status.dirty();\n }\n }\n if (def.maxLength !== null) {\n if (ctx.data.length > def.maxLength.value) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_big,\n maximum: def.maxLength.value,\n type: \"array\",\n inclusive: true,\n message: def.maxLength.message,\n });\n status.dirty();\n }\n }\n if (ctx.common.async) {\n return Promise.all(ctx.data.map((item, i) => {\n return def.type._parseAsync(new ParseInputLazyPath(ctx, item, ctx.path, i));\n })).then((result) => {\n return parseUtil_1.ParseStatus.mergeArray(status, result);\n });\n }\n const result = ctx.data.map((item, i) => {\n return def.type._parseSync(new ParseInputLazyPath(ctx, item, ctx.path, i));\n });\n return parseUtil_1.ParseStatus.mergeArray(status, result);\n }\n get element() {\n return this._def.type;\n }\n min(minLength, message) {\n return new ZodArray({\n ...this._def,\n minLength: { value: minLength, message: errorUtil_1.errorUtil.toString(message) },\n });\n }\n max(maxLength, message) {\n return new ZodArray({\n ...this._def,\n maxLength: { value: maxLength, message: errorUtil_1.errorUtil.toString(message) },\n });\n }\n length(len, message) {\n return 
this.min(len, message).max(len, message);\n }\n nonempty(message) {\n return this.min(1, message);\n }\n}\nexports.ZodArray = ZodArray;\nZodArray.create = (schema, params) => {\n return new ZodArray({\n type: schema,\n minLength: null,\n maxLength: null,\n typeName: ZodFirstPartyTypeKind.ZodArray,\n ...processCreateParams(params),\n });\n};\n/////////////////////////////////////////\n/////////////////////////////////////////\n////////// //////////\n////////// ZodObject //////////\n////////// //////////\n/////////////////////////////////////////\n/////////////////////////////////////////\nvar objectUtil;\n(function (objectUtil) {\n objectUtil.mergeShapes = (first, second) => {\n return {\n ...first,\n ...second,\n };\n };\n})(objectUtil = exports.objectUtil || (exports.objectUtil = {}));\nconst AugmentFactory = (def) => (augmentation) => {\n return new ZodObject({\n ...def,\n shape: () => ({\n ...def.shape(),\n ...augmentation,\n }),\n });\n};\nfunction deepPartialify(schema) {\n if (schema instanceof ZodObject) {\n const newShape = {};\n for (const key in schema.shape) {\n const fieldSchema = schema.shape[key];\n newShape[key] = ZodOptional.create(deepPartialify(fieldSchema));\n }\n return new ZodObject({\n ...schema._def,\n shape: () => newShape,\n });\n }\n else if (schema instanceof ZodArray) {\n return ZodArray.create(deepPartialify(schema.element));\n }\n else if (schema instanceof ZodOptional) {\n return ZodOptional.create(deepPartialify(schema.unwrap()));\n }\n else if (schema instanceof ZodNullable) {\n return ZodNullable.create(deepPartialify(schema.unwrap()));\n }\n else if (schema instanceof ZodTuple) {\n return ZodTuple.create(schema.items.map((item) => deepPartialify(item)));\n }\n else {\n return schema;\n }\n}\nclass ZodObject extends ZodType {\n constructor() {\n super(...arguments);\n this._cached = null;\n /**\n * @deprecated In most cases, this is no longer needed - unknown properties are now silently stripped.\n * If you want to pass through unknown properties, use `.passthrough()` instead.\n */\n this.nonstrict = this.passthrough;\n this.augment = AugmentFactory(this._def);\n this.extend = AugmentFactory(this._def);\n }\n _getCached() {\n if (this._cached !== null)\n return this._cached;\n const shape = this._def.shape();\n const keys = util_1.util.objectKeys(shape);\n return (this._cached = { shape, keys });\n }\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.object) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.object,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const { status, ctx } = this._processInputParams(input);\n const { shape, keys: shapeKeys } = this._getCached();\n const extraKeys = [];\n for (const key in ctx.data) {\n if (!shapeKeys.includes(key)) {\n extraKeys.push(key);\n }\n }\n const pairs = [];\n for (const key of shapeKeys) {\n const keyValidator = shape[key];\n const value = ctx.data[key];\n pairs.push({\n key: { status: \"valid\", value: key },\n value: keyValidator._parse(new ParseInputLazyPath(ctx, value, ctx.path, key)),\n alwaysSet: key in ctx.data,\n });\n }\n if (this._def.catchall instanceof ZodNever) {\n const unknownKeys = this._def.unknownKeys;\n if (unknownKeys === \"passthrough\") {\n for (const key of extraKeys) {\n pairs.push({\n key: { status: \"valid\", value: key },\n value: { status: \"valid\", value: ctx.data[key] },\n });\n }\n }\n 
else if (unknownKeys === \"strict\") {\n if (extraKeys.length > 0) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.unrecognized_keys,\n keys: extraKeys,\n });\n status.dirty();\n }\n }\n else if (unknownKeys === \"strip\") {\n }\n else {\n throw new Error(`Internal ZodObject error: invalid unknownKeys value.`);\n }\n }\n else {\n // run catchall validation\n const catchall = this._def.catchall;\n for (const key of extraKeys) {\n const value = ctx.data[key];\n pairs.push({\n key: { status: \"valid\", value: key },\n value: catchall._parse(new ParseInputLazyPath(ctx, value, ctx.path, key) //, ctx.child(key), value, getParsedType(value)\n ),\n alwaysSet: key in ctx.data,\n });\n }\n }\n if (ctx.common.async) {\n return Promise.resolve()\n .then(async () => {\n const syncPairs = [];\n for (const pair of pairs) {\n const key = await pair.key;\n syncPairs.push({\n key,\n value: await pair.value,\n alwaysSet: pair.alwaysSet,\n });\n }\n return syncPairs;\n })\n .then((syncPairs) => {\n return parseUtil_1.ParseStatus.mergeObjectSync(status, syncPairs);\n });\n }\n else {\n return parseUtil_1.ParseStatus.mergeObjectSync(status, pairs);\n }\n }\n get shape() {\n return this._def.shape();\n }\n strict(message) {\n errorUtil_1.errorUtil.errToObj;\n return new ZodObject({\n ...this._def,\n unknownKeys: \"strict\",\n ...(message !== undefined\n ? {\n errorMap: (issue, ctx) => {\n var _a, _b, _c, _d;\n const defaultError = (_c = (_b = (_a = this._def).errorMap) === null || _b === void 0 ? void 0 : _b.call(_a, issue, ctx).message) !== null && _c !== void 0 ? _c : ctx.defaultError;\n if (issue.code === \"unrecognized_keys\")\n return {\n message: (_d = errorUtil_1.errorUtil.errToObj(message).message) !== null && _d !== void 0 ? _d : defaultError,\n };\n return {\n message: defaultError,\n };\n },\n }\n : {}),\n });\n }\n strip() {\n return new ZodObject({\n ...this._def,\n unknownKeys: \"strip\",\n });\n }\n passthrough() {\n return new ZodObject({\n ...this._def,\n unknownKeys: \"passthrough\",\n });\n }\n setKey(key, schema) {\n return this.augment({ [key]: schema });\n }\n /**\n * Prior to zod@1.0.12 there was a bug in the\n * inferred type of merged objects. 
Please\n * upgrade if you are experiencing issues.\n */\n merge(merging) {\n // const mergedShape = objectUtil.mergeShapes(\n // this._def.shape(),\n // merging._def.shape()\n // );\n const merged = new ZodObject({\n unknownKeys: merging._def.unknownKeys,\n catchall: merging._def.catchall,\n shape: () => objectUtil.mergeShapes(this._def.shape(), merging._def.shape()),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n });\n return merged;\n }\n catchall(index) {\n return new ZodObject({\n ...this._def,\n catchall: index,\n });\n }\n pick(mask) {\n const shape = {};\n util_1.util.objectKeys(mask).map((key) => {\n // only add to shape if key corresponds to an element of the current shape\n if (this.shape[key])\n shape[key] = this.shape[key];\n });\n return new ZodObject({\n ...this._def,\n shape: () => shape,\n });\n }\n omit(mask) {\n const shape = {};\n util_1.util.objectKeys(this.shape).map((key) => {\n if (util_1.util.objectKeys(mask).indexOf(key) === -1) {\n shape[key] = this.shape[key];\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => shape,\n });\n }\n deepPartial() {\n return deepPartialify(this);\n }\n partial(mask) {\n const newShape = {};\n if (mask) {\n util_1.util.objectKeys(this.shape).map((key) => {\n if (util_1.util.objectKeys(mask).indexOf(key) === -1) {\n newShape[key] = this.shape[key];\n }\n else {\n newShape[key] = this.shape[key].optional();\n }\n });\n return new ZodObject({\n ...this._def,\n shape: () => newShape,\n });\n }\n else {\n for (const key in this.shape) {\n const fieldSchema = this.shape[key];\n newShape[key] = fieldSchema.optional();\n }\n }\n return new ZodObject({\n ...this._def,\n shape: () => newShape,\n });\n }\n required() {\n const newShape = {};\n for (const key in this.shape) {\n const fieldSchema = this.shape[key];\n let newField = fieldSchema;\n while (newField instanceof ZodOptional) {\n newField = newField._def.innerType;\n }\n newShape[key] = newField;\n }\n return new ZodObject({\n ...this._def,\n shape: () => newShape,\n });\n }\n}\nexports.ZodObject = ZodObject;\nZodObject.create = (shape, params) => {\n return new ZodObject({\n shape: () => shape,\n unknownKeys: \"strip\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params),\n });\n};\nZodObject.strictCreate = (shape, params) => {\n return new ZodObject({\n shape: () => shape,\n unknownKeys: \"strict\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params),\n });\n};\nZodObject.lazycreate = (shape, params) => {\n return new ZodObject({\n shape,\n unknownKeys: \"strip\",\n catchall: ZodNever.create(),\n typeName: ZodFirstPartyTypeKind.ZodObject,\n ...processCreateParams(params),\n });\n};\nclass ZodUnion extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const options = this._def.options;\n function handleResults(results) {\n // return first issue-free validation if it exists\n for (const result of results) {\n if (result.result.status === \"valid\") {\n return result.result;\n }\n }\n for (const result of results) {\n if (result.result.status === \"dirty\") {\n // add issues from dirty option\n ctx.common.issues.push(...result.ctx.common.issues);\n return result.result;\n }\n }\n // return invalid\n const unionErrors = results.map((result) => new ZodError_1.ZodError(result.ctx.common.issues));\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_union,\n unionErrors,\n });\n return 
parseUtil_1.INVALID;\n }\n if (ctx.common.async) {\n return Promise.all(options.map(async (option) => {\n const childCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: [],\n },\n parent: null,\n };\n return {\n result: await option._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: childCtx,\n }),\n ctx: childCtx,\n };\n })).then(handleResults);\n }\n else {\n let dirty = undefined;\n const issues = [];\n for (const option of options) {\n const childCtx = {\n ...ctx,\n common: {\n ...ctx.common,\n issues: [],\n },\n parent: null,\n };\n const result = option._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: childCtx,\n });\n if (result.status === \"valid\") {\n return result;\n }\n else if (result.status === \"dirty\" && !dirty) {\n dirty = { result, ctx: childCtx };\n }\n if (childCtx.common.issues.length) {\n issues.push(childCtx.common.issues);\n }\n }\n if (dirty) {\n ctx.common.issues.push(...dirty.ctx.common.issues);\n return dirty.result;\n }\n const unionErrors = issues.map((issues) => new ZodError_1.ZodError(issues));\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_union,\n unionErrors,\n });\n return parseUtil_1.INVALID;\n }\n }\n get options() {\n return this._def.options;\n }\n}\nexports.ZodUnion = ZodUnion;\nZodUnion.create = (types, params) => {\n return new ZodUnion({\n options: types,\n typeName: ZodFirstPartyTypeKind.ZodUnion,\n ...processCreateParams(params),\n });\n};\nclass ZodDiscriminatedUnion extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.object) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.object,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const discriminator = this.discriminator;\n const discriminatorValue = ctx.data[discriminator];\n const option = this.options.get(discriminatorValue);\n if (!option) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_union_discriminator,\n options: this.validDiscriminatorValues,\n path: [discriminator],\n });\n return parseUtil_1.INVALID;\n }\n if (ctx.common.async) {\n return option._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n }\n else {\n return option._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n }\n }\n get discriminator() {\n return this._def.discriminator;\n }\n get validDiscriminatorValues() {\n return Array.from(this.options.keys());\n }\n get options() {\n return this._def.options;\n }\n /**\n * The constructor of the discriminated union schema. Its behaviour is very similar to that of the normal z.union() constructor.\n * However, it only allows a union of objects, all of which need to share a discriminator property. 
This property must\n * have a different value for each object in the union.\n * @param discriminator the name of the discriminator property\n * @param types an array of object schemas\n * @param params\n */\n static create(discriminator, types, params) {\n // Get all the valid discriminator values\n const options = new Map();\n try {\n types.forEach((type) => {\n const discriminatorValue = type.shape[discriminator].value;\n options.set(discriminatorValue, type);\n });\n }\n catch (e) {\n throw new Error(\"The discriminator value could not be extracted from all the provided schemas\");\n }\n // Assert that all the discriminator values are unique\n if (options.size !== types.length) {\n throw new Error(\"Some of the discriminator values are not unique\");\n }\n return new ZodDiscriminatedUnion({\n typeName: ZodFirstPartyTypeKind.ZodDiscriminatedUnion,\n discriminator,\n options,\n ...processCreateParams(params),\n });\n }\n}\nexports.ZodDiscriminatedUnion = ZodDiscriminatedUnion;\nfunction mergeValues(a, b) {\n const aType = util_1.getParsedType(a);\n const bType = util_1.getParsedType(b);\n if (a === b) {\n return { valid: true, data: a };\n }\n else if (aType === util_1.ZodParsedType.object && bType === util_1.ZodParsedType.object) {\n const bKeys = util_1.util.objectKeys(b);\n const sharedKeys = util_1.util\n .objectKeys(a)\n .filter((key) => bKeys.indexOf(key) !== -1);\n const newObj = { ...a, ...b };\n for (const key of sharedKeys) {\n const sharedValue = mergeValues(a[key], b[key]);\n if (!sharedValue.valid) {\n return { valid: false };\n }\n newObj[key] = sharedValue.data;\n }\n return { valid: true, data: newObj };\n }\n else if (aType === util_1.ZodParsedType.array && bType === util_1.ZodParsedType.array) {\n if (a.length !== b.length) {\n return { valid: false };\n }\n const newArray = [];\n for (let index = 0; index < a.length; index++) {\n const itemA = a[index];\n const itemB = b[index];\n const sharedValue = mergeValues(itemA, itemB);\n if (!sharedValue.valid) {\n return { valid: false };\n }\n newArray.push(sharedValue.data);\n }\n return { valid: true, data: newArray };\n }\n else if (aType === util_1.ZodParsedType.date &&\n bType === util_1.ZodParsedType.date &&\n +a === +b) {\n return { valid: true, data: a };\n }\n else {\n return { valid: false };\n }\n}\nclass ZodIntersection extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n const handleParsed = (parsedLeft, parsedRight) => {\n if (parseUtil_1.isAborted(parsedLeft) || parseUtil_1.isAborted(parsedRight)) {\n return parseUtil_1.INVALID;\n }\n const merged = mergeValues(parsedLeft.value, parsedRight.value);\n if (!merged.valid) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_intersection_types,\n });\n return parseUtil_1.INVALID;\n }\n if (parseUtil_1.isDirty(parsedLeft) || parseUtil_1.isDirty(parsedRight)) {\n status.dirty();\n }\n return { status: status.value, value: merged.data };\n };\n if (ctx.common.async) {\n return Promise.all([\n this._def.left._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }),\n this._def.right._parseAsync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }),\n ]).then(([left, right]) => handleParsed(left, right));\n }\n else {\n return handleParsed(this._def.left._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }), this._def.right._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n }));\n }\n }\n}\nexports.ZodIntersection = 
ZodIntersection;\nZodIntersection.create = (left, right, params) => {\n return new ZodIntersection({\n left: left,\n right: right,\n typeName: ZodFirstPartyTypeKind.ZodIntersection,\n ...processCreateParams(params),\n });\n};\nclass ZodTuple extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.array) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.array,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n if (ctx.data.length < this._def.items.length) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: this._def.items.length,\n inclusive: true,\n type: \"array\",\n });\n return parseUtil_1.INVALID;\n }\n const rest = this._def.rest;\n if (!rest && ctx.data.length > this._def.items.length) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_big,\n maximum: this._def.items.length,\n inclusive: true,\n type: \"array\",\n });\n status.dirty();\n }\n const items = ctx.data\n .map((item, itemIndex) => {\n const schema = this._def.items[itemIndex] || this._def.rest;\n if (!schema)\n return null;\n return schema._parse(new ParseInputLazyPath(ctx, item, ctx.path, itemIndex));\n })\n .filter((x) => !!x); // filter nulls\n if (ctx.common.async) {\n return Promise.all(items).then((results) => {\n return parseUtil_1.ParseStatus.mergeArray(status, results);\n });\n }\n else {\n return parseUtil_1.ParseStatus.mergeArray(status, items);\n }\n }\n get items() {\n return this._def.items;\n }\n rest(rest) {\n return new ZodTuple({\n ...this._def,\n rest,\n });\n }\n}\nexports.ZodTuple = ZodTuple;\nZodTuple.create = (schemas, params) => {\n return new ZodTuple({\n items: schemas,\n typeName: ZodFirstPartyTypeKind.ZodTuple,\n rest: null,\n ...processCreateParams(params),\n });\n};\nclass ZodRecord extends ZodType {\n get keySchema() {\n return this._def.keyType;\n }\n get valueSchema() {\n return this._def.valueType;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.object) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.object,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const pairs = [];\n const keyType = this._def.keyType;\n const valueType = this._def.valueType;\n for (const key in ctx.data) {\n pairs.push({\n key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, key)),\n value: valueType._parse(new ParseInputLazyPath(ctx, ctx.data[key], ctx.path, key)),\n });\n }\n if (ctx.common.async) {\n return parseUtil_1.ParseStatus.mergeObjectAsync(status, pairs);\n }\n else {\n return parseUtil_1.ParseStatus.mergeObjectSync(status, pairs);\n }\n }\n get element() {\n return this._def.valueType;\n }\n static create(first, second, third) {\n if (second instanceof ZodType) {\n return new ZodRecord({\n keyType: first,\n valueType: second,\n typeName: ZodFirstPartyTypeKind.ZodRecord,\n ...processCreateParams(third),\n });\n }\n return new ZodRecord({\n keyType: ZodString.create(),\n valueType: first,\n typeName: ZodFirstPartyTypeKind.ZodRecord,\n ...processCreateParams(second),\n });\n }\n}\nexports.ZodRecord = ZodRecord;\nclass ZodMap extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.map) {\n 
parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.map,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const keyType = this._def.keyType;\n const valueType = this._def.valueType;\n const pairs = [...ctx.data.entries()].map(([key, value], index) => {\n return {\n key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, [index, \"key\"])),\n value: valueType._parse(new ParseInputLazyPath(ctx, value, ctx.path, [index, \"value\"])),\n };\n });\n if (ctx.common.async) {\n const finalMap = new Map();\n return Promise.resolve().then(async () => {\n for (const pair of pairs) {\n const key = await pair.key;\n const value = await pair.value;\n if (key.status === \"aborted\" || value.status === \"aborted\") {\n return parseUtil_1.INVALID;\n }\n if (key.status === \"dirty\" || value.status === \"dirty\") {\n status.dirty();\n }\n finalMap.set(key.value, value.value);\n }\n return { status: status.value, value: finalMap };\n });\n }\n else {\n const finalMap = new Map();\n for (const pair of pairs) {\n const key = pair.key;\n const value = pair.value;\n if (key.status === \"aborted\" || value.status === \"aborted\") {\n return parseUtil_1.INVALID;\n }\n if (key.status === \"dirty\" || value.status === \"dirty\") {\n status.dirty();\n }\n finalMap.set(key.value, value.value);\n }\n return { status: status.value, value: finalMap };\n }\n }\n}\nexports.ZodMap = ZodMap;\nZodMap.create = (keyType, valueType, params) => {\n return new ZodMap({\n valueType,\n keyType,\n typeName: ZodFirstPartyTypeKind.ZodMap,\n ...processCreateParams(params),\n });\n};\nclass ZodSet extends ZodType {\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.set) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.set,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const def = this._def;\n if (def.minSize !== null) {\n if (ctx.data.size < def.minSize.value) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_small,\n minimum: def.minSize.value,\n type: \"set\",\n inclusive: true,\n message: def.minSize.message,\n });\n status.dirty();\n }\n }\n if (def.maxSize !== null) {\n if (ctx.data.size > def.maxSize.value) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.too_big,\n maximum: def.maxSize.value,\n type: \"set\",\n inclusive: true,\n message: def.maxSize.message,\n });\n status.dirty();\n }\n }\n const valueType = this._def.valueType;\n function finalizeSet(elements) {\n const parsedSet = new Set();\n for (const element of elements) {\n if (element.status === \"aborted\")\n return parseUtil_1.INVALID;\n if (element.status === \"dirty\")\n status.dirty();\n parsedSet.add(element.value);\n }\n return { status: status.value, value: parsedSet };\n }\n const elements = [...ctx.data.values()].map((item, i) => valueType._parse(new ParseInputLazyPath(ctx, item, ctx.path, i)));\n if (ctx.common.async) {\n return Promise.all(elements).then((elements) => finalizeSet(elements));\n }\n else {\n return finalizeSet(elements);\n }\n }\n min(minSize, message) {\n return new ZodSet({\n ...this._def,\n minSize: { value: minSize, message: errorUtil_1.errorUtil.toString(message) },\n });\n }\n max(maxSize, message) {\n return new ZodSet({\n ...this._def,\n maxSize: { value: maxSize, message: errorUtil_1.errorUtil.toString(message) },\n });\n }\n 
size(size, message) {\n return this.min(size, message).max(size, message);\n }\n nonempty(message) {\n return this.min(1, message);\n }\n}\nexports.ZodSet = ZodSet;\nZodSet.create = (valueType, params) => {\n return new ZodSet({\n valueType,\n minSize: null,\n maxSize: null,\n typeName: ZodFirstPartyTypeKind.ZodSet,\n ...processCreateParams(params),\n });\n};\nclass ZodFunction extends ZodType {\n constructor() {\n super(...arguments);\n this.validate = this.implement;\n }\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.function) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.function,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n function makeArgsIssue(args, error) {\n return parseUtil_1.makeIssue({\n data: args,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n ZodError_1.overrideErrorMap,\n ZodError_1.defaultErrorMap,\n ].filter((x) => !!x),\n issueData: {\n code: ZodError_1.ZodIssueCode.invalid_arguments,\n argumentsError: error,\n },\n });\n }\n function makeReturnsIssue(returns, error) {\n return parseUtil_1.makeIssue({\n data: returns,\n path: ctx.path,\n errorMaps: [\n ctx.common.contextualErrorMap,\n ctx.schemaErrorMap,\n ZodError_1.overrideErrorMap,\n ZodError_1.defaultErrorMap,\n ].filter((x) => !!x),\n issueData: {\n code: ZodError_1.ZodIssueCode.invalid_return_type,\n returnTypeError: error,\n },\n });\n }\n const params = { errorMap: ctx.common.contextualErrorMap };\n const fn = ctx.data;\n if (this._def.returns instanceof ZodPromise) {\n return parseUtil_1.OK(async (...args) => {\n const error = new ZodError_1.ZodError([]);\n const parsedArgs = await this._def.args\n .parseAsync(args, params)\n .catch((e) => {\n error.addIssue(makeArgsIssue(args, e));\n throw error;\n });\n const result = await fn(...parsedArgs);\n const parsedReturns = await this._def.returns._def.type\n .parseAsync(result, params)\n .catch((e) => {\n error.addIssue(makeReturnsIssue(result, e));\n throw error;\n });\n return parsedReturns;\n });\n }\n else {\n return parseUtil_1.OK((...args) => {\n const parsedArgs = this._def.args.safeParse(args, params);\n if (!parsedArgs.success) {\n throw new ZodError_1.ZodError([makeArgsIssue(args, parsedArgs.error)]);\n }\n const result = fn(...parsedArgs.data);\n const parsedReturns = this._def.returns.safeParse(result, params);\n if (!parsedReturns.success) {\n throw new ZodError_1.ZodError([makeReturnsIssue(result, parsedReturns.error)]);\n }\n return parsedReturns.data;\n });\n }\n }\n parameters() {\n return this._def.args;\n }\n returnType() {\n return this._def.returns;\n }\n args(...items) {\n return new ZodFunction({\n ...this._def,\n args: ZodTuple.create(items).rest(ZodUnknown.create()),\n });\n }\n returns(returnType) {\n return new ZodFunction({\n ...this._def,\n returns: returnType,\n });\n }\n implement(func) {\n const validatedFunc = this.parse(func);\n return validatedFunc;\n }\n strictImplement(func) {\n const validatedFunc = this.parse(func);\n return validatedFunc;\n }\n}\nexports.ZodFunction = ZodFunction;\nZodFunction.create = (args, returns, params) => {\n return new ZodFunction({\n args: (args\n ? 
args.rest(ZodUnknown.create())\n : ZodTuple.create([]).rest(ZodUnknown.create())),\n returns: returns || ZodUnknown.create(),\n typeName: ZodFirstPartyTypeKind.ZodFunction,\n ...processCreateParams(params),\n });\n};\nclass ZodLazy extends ZodType {\n get schema() {\n return this._def.getter();\n }\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n const lazySchema = this._def.getter();\n return lazySchema._parse({ data: ctx.data, path: ctx.path, parent: ctx });\n }\n}\nexports.ZodLazy = ZodLazy;\nZodLazy.create = (getter, params) => {\n return new ZodLazy({\n getter: getter,\n typeName: ZodFirstPartyTypeKind.ZodLazy,\n ...processCreateParams(params),\n });\n};\nclass ZodLiteral extends ZodType {\n _parse(input) {\n if (input.data !== this._def.value) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_literal,\n expected: this._def.value,\n });\n return parseUtil_1.INVALID;\n }\n return { status: \"valid\", value: input.data };\n }\n get value() {\n return this._def.value;\n }\n}\nexports.ZodLiteral = ZodLiteral;\nZodLiteral.create = (value, params) => {\n return new ZodLiteral({\n value: value,\n typeName: ZodFirstPartyTypeKind.ZodLiteral,\n ...processCreateParams(params),\n });\n};\nfunction createZodEnum(values, params) {\n return new ZodEnum({\n values: values,\n typeName: ZodFirstPartyTypeKind.ZodEnum,\n ...processCreateParams(params),\n });\n}\nclass ZodEnum extends ZodType {\n _parse(input) {\n if (typeof input.data !== \"string\") {\n const ctx = this._getOrReturnCtx(input);\n const expectedValues = this._def.values;\n parseUtil_1.addIssueToContext(ctx, {\n expected: util_1.util.joinValues(expectedValues),\n received: ctx.parsedType,\n code: ZodError_1.ZodIssueCode.invalid_type,\n });\n return parseUtil_1.INVALID;\n }\n if (this._def.values.indexOf(input.data) === -1) {\n const ctx = this._getOrReturnCtx(input);\n const expectedValues = this._def.values;\n parseUtil_1.addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodError_1.ZodIssueCode.invalid_enum_value,\n options: expectedValues,\n });\n return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n get options() {\n return this._def.values;\n }\n get enum() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n get Values() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n get Enum() {\n const enumValues = {};\n for (const val of this._def.values) {\n enumValues[val] = val;\n }\n return enumValues;\n }\n}\nexports.ZodEnum = ZodEnum;\nZodEnum.create = createZodEnum;\nclass ZodNativeEnum extends ZodType {\n _parse(input) {\n const nativeEnumValues = util_1.util.getValidEnumValues(this._def.values);\n const ctx = this._getOrReturnCtx(input);\n if (ctx.parsedType !== util_1.ZodParsedType.string &&\n ctx.parsedType !== util_1.ZodParsedType.number) {\n const expectedValues = util_1.util.objectValues(nativeEnumValues);\n parseUtil_1.addIssueToContext(ctx, {\n expected: util_1.util.joinValues(expectedValues),\n received: ctx.parsedType,\n code: ZodError_1.ZodIssueCode.invalid_type,\n });\n return parseUtil_1.INVALID;\n }\n if (nativeEnumValues.indexOf(input.data) === -1) {\n const expectedValues = util_1.util.objectValues(nativeEnumValues);\n parseUtil_1.addIssueToContext(ctx, {\n received: ctx.data,\n code: ZodError_1.ZodIssueCode.invalid_enum_value,\n options: expectedValues,\n });\n 
return parseUtil_1.INVALID;\n }\n return parseUtil_1.OK(input.data);\n }\n get enum() {\n return this._def.values;\n }\n}\nexports.ZodNativeEnum = ZodNativeEnum;\nZodNativeEnum.create = (values, params) => {\n return new ZodNativeEnum({\n values: values,\n typeName: ZodFirstPartyTypeKind.ZodNativeEnum,\n ...processCreateParams(params),\n });\n};\nclass ZodPromise extends ZodType {\n _parse(input) {\n const { ctx } = this._processInputParams(input);\n if (ctx.parsedType !== util_1.ZodParsedType.promise &&\n ctx.common.async === false) {\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.promise,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n const promisified = ctx.parsedType === util_1.ZodParsedType.promise\n ? ctx.data\n : Promise.resolve(ctx.data);\n return parseUtil_1.OK(promisified.then((data) => {\n return this._def.type.parseAsync(data, {\n path: ctx.path,\n errorMap: ctx.common.contextualErrorMap,\n });\n }));\n }\n}\nexports.ZodPromise = ZodPromise;\nZodPromise.create = (schema, params) => {\n return new ZodPromise({\n type: schema,\n typeName: ZodFirstPartyTypeKind.ZodPromise,\n ...processCreateParams(params),\n });\n};\nclass ZodEffects extends ZodType {\n innerType() {\n return this._def.schema;\n }\n _parse(input) {\n const { status, ctx } = this._processInputParams(input);\n const effect = this._def.effect || null;\n if (effect.type === \"preprocess\") {\n const processed = effect.transform(ctx.data);\n if (ctx.common.async) {\n return Promise.resolve(processed).then((processed) => {\n return this._def.schema._parseAsync({\n data: processed,\n path: ctx.path,\n parent: ctx,\n });\n });\n }\n else {\n return this._def.schema._parseSync({\n data: processed,\n path: ctx.path,\n parent: ctx,\n });\n }\n }\n const checkCtx = {\n addIssue: (arg) => {\n parseUtil_1.addIssueToContext(ctx, arg);\n if (arg.fatal) {\n status.abort();\n }\n else {\n status.dirty();\n }\n },\n get path() {\n return ctx.path;\n },\n };\n checkCtx.addIssue = checkCtx.addIssue.bind(checkCtx);\n if (effect.type === \"refinement\") {\n const executeRefinement = (acc\n // effect: RefinementEffect\n ) => {\n const result = effect.refinement(acc, checkCtx);\n if (ctx.common.async) {\n return Promise.resolve(result);\n }\n if (result instanceof Promise) {\n throw new Error(\"Async refinement encountered during synchronous parse operation. 
Use .parseAsync instead.\");\n }\n return acc;\n };\n if (ctx.common.async === false) {\n const inner = this._def.schema._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n if (inner.status === \"aborted\")\n return parseUtil_1.INVALID;\n if (inner.status === \"dirty\")\n status.dirty();\n // return value is ignored\n executeRefinement(inner.value);\n return { status: status.value, value: inner.value };\n }\n else {\n return this._def.schema\n ._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx })\n .then((inner) => {\n if (inner.status === \"aborted\")\n return parseUtil_1.INVALID;\n if (inner.status === \"dirty\")\n status.dirty();\n return executeRefinement(inner.value).then(() => {\n return { status: status.value, value: inner.value };\n });\n });\n }\n }\n if (effect.type === \"transform\") {\n if (ctx.common.async === false) {\n const base = this._def.schema._parseSync({\n data: ctx.data,\n path: ctx.path,\n parent: ctx,\n });\n // if (base.status === \"aborted\") return INVALID;\n // if (base.status === \"dirty\") {\n // return { status: \"dirty\", value: base.value };\n // }\n if (!parseUtil_1.isValid(base))\n return base;\n const result = effect.transform(base.value, checkCtx);\n if (result instanceof Promise) {\n throw new Error(`Asynchronous transform encountered during synchronous parse operation. Use .parseAsync instead.`);\n }\n return { status: status.value, value: result };\n }\n else {\n return this._def.schema\n ._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx })\n .then((base) => {\n if (!parseUtil_1.isValid(base))\n return base;\n // if (base.status === \"aborted\") return INVALID;\n // if (base.status === \"dirty\") {\n // return { status: \"dirty\", value: base.value };\n // }\n return Promise.resolve(effect.transform(base.value, checkCtx)).then((result) => ({ status: status.value, value: result }));\n });\n }\n }\n util_1.util.assertNever(effect);\n }\n}\nexports.ZodEffects = ZodEffects;\nexports.ZodTransformer = ZodEffects;\nZodEffects.create = (schema, effect, params) => {\n return new ZodEffects({\n schema,\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n effect,\n ...processCreateParams(params),\n });\n};\nZodEffects.createWithPreprocess = (preprocess, schema, params) => {\n return new ZodEffects({\n schema,\n effect: { type: \"preprocess\", transform: preprocess },\n typeName: ZodFirstPartyTypeKind.ZodEffects,\n ...processCreateParams(params),\n });\n};\nclass ZodOptional extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType === util_1.ZodParsedType.undefined) {\n return parseUtil_1.OK(undefined);\n }\n return this._def.innerType._parse(input);\n }\n unwrap() {\n return this._def.innerType;\n }\n}\nexports.ZodOptional = ZodOptional;\nZodOptional.create = (type, params) => {\n return new ZodOptional({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodOptional,\n ...processCreateParams(params),\n });\n};\nclass ZodNullable extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType === util_1.ZodParsedType.null) {\n return parseUtil_1.OK(null);\n }\n return this._def.innerType._parse(input);\n }\n unwrap() {\n return this._def.innerType;\n }\n}\nexports.ZodNullable = ZodNullable;\nZodNullable.create = (type, params) => {\n return new ZodNullable({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodNullable,\n ...processCreateParams(params),\n });\n};\nclass ZodDefault extends ZodType {\n _parse(input) {\n const { ctx } = 
this._processInputParams(input);\n let data = ctx.data;\n if (ctx.parsedType === util_1.ZodParsedType.undefined) {\n data = this._def.defaultValue();\n }\n return this._def.innerType._parse({\n data,\n path: ctx.path,\n parent: ctx,\n });\n }\n removeDefault() {\n return this._def.innerType;\n }\n}\nexports.ZodDefault = ZodDefault;\nZodDefault.create = (type, params) => {\n return new ZodOptional({\n innerType: type,\n typeName: ZodFirstPartyTypeKind.ZodOptional,\n ...processCreateParams(params),\n });\n};\nclass ZodNaN extends ZodType {\n _parse(input) {\n const parsedType = this._getType(input);\n if (parsedType !== util_1.ZodParsedType.nan) {\n const ctx = this._getOrReturnCtx(input);\n parseUtil_1.addIssueToContext(ctx, {\n code: ZodError_1.ZodIssueCode.invalid_type,\n expected: util_1.ZodParsedType.nan,\n received: ctx.parsedType,\n });\n return parseUtil_1.INVALID;\n }\n return { status: \"valid\", value: input.data };\n }\n}\nexports.ZodNaN = ZodNaN;\nZodNaN.create = (params) => {\n return new ZodNaN({\n typeName: ZodFirstPartyTypeKind.ZodNaN,\n ...processCreateParams(params),\n });\n};\nconst custom = (check, params = {}, fatal) => {\n if (check)\n return ZodAny.create().superRefine((data, ctx) => {\n if (!check(data)) {\n const p = typeof params === \"function\" ? params(data) : params;\n const p2 = typeof p === \"string\" ? { message: p } : p;\n ctx.addIssue({ code: \"custom\", ...p2, fatal });\n }\n });\n return ZodAny.create();\n};\nexports.custom = custom;\nexports.late = {\n object: ZodObject.lazycreate,\n};\nvar ZodFirstPartyTypeKind;\n(function (ZodFirstPartyTypeKind) {\n ZodFirstPartyTypeKind[\"ZodString\"] = \"ZodString\";\n ZodFirstPartyTypeKind[\"ZodNumber\"] = \"ZodNumber\";\n ZodFirstPartyTypeKind[\"ZodNaN\"] = \"ZodNaN\";\n ZodFirstPartyTypeKind[\"ZodBigInt\"] = \"ZodBigInt\";\n ZodFirstPartyTypeKind[\"ZodBoolean\"] = \"ZodBoolean\";\n ZodFirstPartyTypeKind[\"ZodDate\"] = \"ZodDate\";\n ZodFirstPartyTypeKind[\"ZodUndefined\"] = \"ZodUndefined\";\n ZodFirstPartyTypeKind[\"ZodNull\"] = \"ZodNull\";\n ZodFirstPartyTypeKind[\"ZodAny\"] = \"ZodAny\";\n ZodFirstPartyTypeKind[\"ZodUnknown\"] = \"ZodUnknown\";\n ZodFirstPartyTypeKind[\"ZodNever\"] = \"ZodNever\";\n ZodFirstPartyTypeKind[\"ZodVoid\"] = \"ZodVoid\";\n ZodFirstPartyTypeKind[\"ZodArray\"] = \"ZodArray\";\n ZodFirstPartyTypeKind[\"ZodObject\"] = \"ZodObject\";\n ZodFirstPartyTypeKind[\"ZodUnion\"] = \"ZodUnion\";\n ZodFirstPartyTypeKind[\"ZodDiscriminatedUnion\"] = \"ZodDiscriminatedUnion\";\n ZodFirstPartyTypeKind[\"ZodIntersection\"] = \"ZodIntersection\";\n ZodFirstPartyTypeKind[\"ZodTuple\"] = \"ZodTuple\";\n ZodFirstPartyTypeKind[\"ZodRecord\"] = \"ZodRecord\";\n ZodFirstPartyTypeKind[\"ZodMap\"] = \"ZodMap\";\n ZodFirstPartyTypeKind[\"ZodSet\"] = \"ZodSet\";\n ZodFirstPartyTypeKind[\"ZodFunction\"] = \"ZodFunction\";\n ZodFirstPartyTypeKind[\"ZodLazy\"] = \"ZodLazy\";\n ZodFirstPartyTypeKind[\"ZodLiteral\"] = \"ZodLiteral\";\n ZodFirstPartyTypeKind[\"ZodEnum\"] = \"ZodEnum\";\n ZodFirstPartyTypeKind[\"ZodEffects\"] = \"ZodEffects\";\n ZodFirstPartyTypeKind[\"ZodNativeEnum\"] = \"ZodNativeEnum\";\n ZodFirstPartyTypeKind[\"ZodOptional\"] = \"ZodOptional\";\n ZodFirstPartyTypeKind[\"ZodNullable\"] = \"ZodNullable\";\n ZodFirstPartyTypeKind[\"ZodDefault\"] = \"ZodDefault\";\n ZodFirstPartyTypeKind[\"ZodPromise\"] = \"ZodPromise\";\n})(ZodFirstPartyTypeKind = exports.ZodFirstPartyTypeKind || (exports.ZodFirstPartyTypeKind = {}));\nconst instanceOfType = (cls, params = {\n message: `Input not instance of 
${cls.name}`,\n}) => exports.custom((data) => data instanceof cls, params, true);\nexports.instanceof = instanceOfType;\nconst stringType = ZodString.create;\nexports.string = stringType;\nconst numberType = ZodNumber.create;\nexports.number = numberType;\nconst nanType = ZodNaN.create;\nexports.nan = nanType;\nconst bigIntType = ZodBigInt.create;\nexports.bigint = bigIntType;\nconst booleanType = ZodBoolean.create;\nexports.boolean = booleanType;\nconst dateType = ZodDate.create;\nexports.date = dateType;\nconst undefinedType = ZodUndefined.create;\nexports.undefined = undefinedType;\nconst nullType = ZodNull.create;\nexports.null = nullType;\nconst anyType = ZodAny.create;\nexports.any = anyType;\nconst unknownType = ZodUnknown.create;\nexports.unknown = unknownType;\nconst neverType = ZodNever.create;\nexports.never = neverType;\nconst voidType = ZodVoid.create;\nexports.void = voidType;\nconst arrayType = ZodArray.create;\nexports.array = arrayType;\nconst objectType = ZodObject.create;\nexports.object = objectType;\nconst strictObjectType = ZodObject.strictCreate;\nexports.strictObject = strictObjectType;\nconst unionType = ZodUnion.create;\nexports.union = unionType;\nconst discriminatedUnionType = ZodDiscriminatedUnion.create;\nexports.discriminatedUnion = discriminatedUnionType;\nconst intersectionType = ZodIntersection.create;\nexports.intersection = intersectionType;\nconst tupleType = ZodTuple.create;\nexports.tuple = tupleType;\nconst recordType = ZodRecord.create;\nexports.record = recordType;\nconst mapType = ZodMap.create;\nexports.map = mapType;\nconst setType = ZodSet.create;\nexports.set = setType;\nconst functionType = ZodFunction.create;\nexports.function = functionType;\nconst lazyType = ZodLazy.create;\nexports.lazy = lazyType;\nconst literalType = ZodLiteral.create;\nexports.literal = literalType;\nconst enumType = ZodEnum.create;\nexports.enum = enumType;\nconst nativeEnumType = ZodNativeEnum.create;\nexports.nativeEnum = nativeEnumType;\nconst promiseType = ZodPromise.create;\nexports.promise = promiseType;\nconst effectsType = ZodEffects.create;\nexports.effect = effectsType;\nexports.transformer = effectsType;\nconst optionalType = ZodOptional.create;\nexports.optional = optionalType;\nconst nullableType = ZodNullable.create;\nexports.nullable = nullableType;\nconst preprocessType = ZodEffects.createWithPreprocess;\nexports.preprocess = preprocessType;\nconst ostring = () => stringType().optional();\nexports.ostring = ostring;\nconst onumber = () => numberType().optional();\nexports.onumber = onumber;\nconst oboolean = () => booleanType().optional();\nexports.oboolean = oboolean;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.readConfig = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst z = __importStar(require(\"zod\"));\nconst schemas_1 = require(\"./schemas\");\nfunction getOptionalInput(name) {\n const value = core.getInput(name);\n return value.length > 0 ? value : undefined;\n}\nfunction readConfig() {\n const fail_on_severity = z\n .enum(schemas_1.SEVERITIES)\n .default('low')\n .parse(getOptionalInput('fail-on-severity'));\n const allow_licenses = getOptionalInput('allow-licenses');\n const deny_licenses = getOptionalInput('deny-licenses');\n if (allow_licenses !== undefined && deny_licenses !== undefined) {\n throw new Error(\"Can't specify both allow_licenses and deny_licenses\");\n }\n return {\n fail_on_severity,\n allow_licenses: allow_licenses === null || allow_licenses === void 0 ? void 0 : allow_licenses.split(',').map(x => x.trim()),\n deny_licenses: deny_licenses === null || deny_licenses === void 0 ? void 0 : deny_licenses.split(',').map(x => x.trim())\n };\n}\nexports.readConfig = readConfig;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.filterChangesBySeverity = void 0;\nconst schemas_1 = require(\"./schemas\");\nfunction filterChangesBySeverity(severity, changes) {\n const severityIdx = schemas_1.SEVERITIES.indexOf(severity);\n let filteredChanges = [];\n for (let change of changes) {\n if (change === undefined ||\n change.vulnerabilities === undefined ||\n change.vulnerabilities.length === 0) {\n continue;\n }\n let fChange = Object.assign(Object.assign({}, change), { vulnerabilities: change.vulnerabilities.filter(vuln => {\n const vulnIdx = schemas_1.SEVERITIES.indexOf(vuln.severity);\n if (vulnIdx <= severityIdx) {\n return true;\n }\n }) });\n filteredChanges.push(fChange);\n }\n // don't want to deal with changes with no vulnerabilities\n filteredChanges = filteredChanges.filter(change => change.vulnerabilities.length > 0);\n return filteredChanges;\n}\nexports.filterChangesBySeverity = filterChangesBySeverity;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChangesSchema = exports.ConfigurationOptionsSchema = exports.PullRequestSchema = exports.ChangeSchema = exports.SEVERITIES = void 0;\nconst z = __importStar(require(\"zod\"));\nexports.SEVERITIES = ['critical', 'high', 'moderate', 'low'];\nexports.ChangeSchema = z.object({\n change_type: z.enum(['added', 'removed']),\n manifest: z.string(),\n ecosystem: z.string(),\n name: z.string(),\n version: z.string(),\n package_url: z.string(),\n license: z.string().nullable(),\n source_repository_url: z.string().nullable(),\n vulnerabilities: z\n .array(z.object({\n severity: z.enum(['critical', 'high', 'moderate', 'low']),\n advisory_ghsa_id: z.string(),\n advisory_summary: z.string(),\n advisory_url: z.string()\n }))\n .optional()\n .default([])\n});\nexports.PullRequestSchema = z.object({\n number: z.number(),\n base: z.object({ sha: z.string() }),\n head: z.object({ sha: z.string() })\n});\nexports.ConfigurationOptionsSchema = z\n .object({\n fail_on_severity: z.enum(exports.SEVERITIES).default('low'),\n allow_licenses: z.array(z.string()).default([]),\n deny_licenses: z.array(z.string()).default([])\n})\n .partial()\n .refine(obj => !(obj.allow_licenses && obj.deny_licenses), \"Your workflow file has both an allow_licenses list and deny_licenses list, but you can only set one or the other.\");\nexports.ChangesSchema = z.array(exports.ChangeSchema);\n",null,"module.exports = require(\"assert\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"punycode\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"zlib\");","const ANSI_BACKGROUND_OFFSET = 10;\n\nconst wrapAnsi16 = (offset = 0) => code => `\\u001B[${code + offset}m`;\n\nconst wrapAnsi256 = (offset = 0) => code => `\\u001B[${38 + offset};5;${code}m`;\n\nconst wrapAnsi16m = (offset = 0) => (red, green, blue) => `\\u001B[${38 + offset};2;${red};${green};${blue}m`;\n\nfunction assembleStyles() {\n\tconst codes = new Map();\n\tconst styles = {\n\t\tmodifier: {\n\t\t\treset: [0, 0],\n\t\t\t// 21 isn't widely supported and 22 does the same thing\n\t\t\tbold: [1, 22],\n\t\t\tdim: [2, 22],\n\t\t\titalic: [3, 23],\n\t\t\tunderline: [4, 24],\n\t\t\toverline: [53, 55],\n\t\t\tinverse: [7, 27],\n\t\t\thidden: [8, 28],\n\t\t\tstrikethrough: [9, 29]\n\t\t},\n\t\tcolor: {\n\t\t\tblack: [30, 39],\n\t\t\tred: [31, 39],\n\t\t\tgreen: [32, 39],\n\t\t\tyellow: [33, 39],\n\t\t\tblue: [34, 39],\n\t\t\tmagenta: [35, 39],\n\t\t\tcyan: [36, 39],\n\t\t\twhite: [37, 39],\n\n\t\t\t// Bright color\n\t\t\tblackBright: [90, 39],\n\t\t\tredBright: [91, 39],\n\t\t\tgreenBright: [92, 39],\n\t\t\tyellowBright: [93, 39],\n\t\t\tblueBright: [94, 39],\n\t\t\tmagentaBright: [95, 
39],\n\t\t\tcyanBright: [96, 39],\n\t\t\twhiteBright: [97, 39]\n\t\t},\n\t\tbgColor: {\n\t\t\tbgBlack: [40, 49],\n\t\t\tbgRed: [41, 49],\n\t\t\tbgGreen: [42, 49],\n\t\t\tbgYellow: [43, 49],\n\t\t\tbgBlue: [44, 49],\n\t\t\tbgMagenta: [45, 49],\n\t\t\tbgCyan: [46, 49],\n\t\t\tbgWhite: [47, 49],\n\n\t\t\t// Bright color\n\t\t\tbgBlackBright: [100, 49],\n\t\t\tbgRedBright: [101, 49],\n\t\t\tbgGreenBright: [102, 49],\n\t\t\tbgYellowBright: [103, 49],\n\t\t\tbgBlueBright: [104, 49],\n\t\t\tbgMagentaBright: [105, 49],\n\t\t\tbgCyanBright: [106, 49],\n\t\t\tbgWhiteBright: [107, 49]\n\t\t}\n\t};\n\n\t// Alias bright black as gray (and grey)\n\tstyles.color.gray = styles.color.blackBright;\n\tstyles.bgColor.bgGray = styles.bgColor.bgBlackBright;\n\tstyles.color.grey = styles.color.blackBright;\n\tstyles.bgColor.bgGrey = styles.bgColor.bgBlackBright;\n\n\tfor (const [groupName, group] of Object.entries(styles)) {\n\t\tfor (const [styleName, style] of Object.entries(group)) {\n\t\t\tstyles[styleName] = {\n\t\t\t\topen: `\\u001B[${style[0]}m`,\n\t\t\t\tclose: `\\u001B[${style[1]}m`\n\t\t\t};\n\n\t\t\tgroup[styleName] = styles[styleName];\n\n\t\t\tcodes.set(style[0], style[1]);\n\t\t}\n\n\t\tObject.defineProperty(styles, groupName, {\n\t\t\tvalue: group,\n\t\t\tenumerable: false\n\t\t});\n\t}\n\n\tObject.defineProperty(styles, 'codes', {\n\t\tvalue: codes,\n\t\tenumerable: false\n\t});\n\n\tstyles.color.close = '\\u001B[39m';\n\tstyles.bgColor.close = '\\u001B[49m';\n\n\tstyles.color.ansi = wrapAnsi16();\n\tstyles.color.ansi256 = wrapAnsi256();\n\tstyles.color.ansi16m = wrapAnsi16m();\n\tstyles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET);\n\tstyles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET);\n\tstyles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET);\n\n\t// From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js\n\tObject.defineProperties(styles, {\n\t\trgbToAnsi256: {\n\t\t\tvalue: (red, green, blue) => {\n\t\t\t\t// We use the extended greyscale palette here, with the exception of\n\t\t\t\t// black and white. 
normal palette only has 4 greyscale shades.\n\t\t\t\tif (red === green && green === blue) {\n\t\t\t\t\tif (red < 8) {\n\t\t\t\t\t\treturn 16;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (red > 248) {\n\t\t\t\t\t\treturn 231;\n\t\t\t\t\t}\n\n\t\t\t\t\treturn Math.round(((red - 8) / 247) * 24) + 232;\n\t\t\t\t}\n\n\t\t\t\treturn 16 +\n\t\t\t\t\t(36 * Math.round(red / 255 * 5)) +\n\t\t\t\t\t(6 * Math.round(green / 255 * 5)) +\n\t\t\t\t\tMath.round(blue / 255 * 5);\n\t\t\t},\n\t\t\tenumerable: false\n\t\t},\n\t\thexToRgb: {\n\t\t\tvalue: hex => {\n\t\t\t\tconst matches = /(?[a-f\\d]{6}|[a-f\\d]{3})/i.exec(hex.toString(16));\n\t\t\t\tif (!matches) {\n\t\t\t\t\treturn [0, 0, 0];\n\t\t\t\t}\n\n\t\t\t\tlet {colorString} = matches.groups;\n\n\t\t\t\tif (colorString.length === 3) {\n\t\t\t\t\tcolorString = colorString.split('').map(character => character + character).join('');\n\t\t\t\t}\n\n\t\t\t\tconst integer = Number.parseInt(colorString, 16);\n\n\t\t\t\treturn [\n\t\t\t\t\t(integer >> 16) & 0xFF,\n\t\t\t\t\t(integer >> 8) & 0xFF,\n\t\t\t\t\tinteger & 0xFF\n\t\t\t\t];\n\t\t\t},\n\t\t\tenumerable: false\n\t\t},\n\t\thexToAnsi256: {\n\t\t\tvalue: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)),\n\t\t\tenumerable: false\n\t\t},\n\t\tansi256ToAnsi: {\n\t\t\tvalue: code => {\n\t\t\t\tif (code < 8) {\n\t\t\t\t\treturn 30 + code;\n\t\t\t\t}\n\n\t\t\t\tif (code < 16) {\n\t\t\t\t\treturn 90 + (code - 8);\n\t\t\t\t}\n\n\t\t\t\tlet red;\n\t\t\t\tlet green;\n\t\t\t\tlet blue;\n\n\t\t\t\tif (code >= 232) {\n\t\t\t\t\tred = (((code - 232) * 10) + 8) / 255;\n\t\t\t\t\tgreen = red;\n\t\t\t\t\tblue = red;\n\t\t\t\t} else {\n\t\t\t\t\tcode -= 16;\n\n\t\t\t\t\tconst remainder = code % 36;\n\n\t\t\t\t\tred = Math.floor(code / 36) / 5;\n\t\t\t\t\tgreen = Math.floor(remainder / 6) / 5;\n\t\t\t\t\tblue = (remainder % 6) / 5;\n\t\t\t\t}\n\n\t\t\t\tconst value = Math.max(red, green, blue) * 2;\n\n\t\t\t\tif (value === 0) {\n\t\t\t\t\treturn 30;\n\t\t\t\t}\n\n\t\t\t\tlet result = 30 + ((Math.round(blue) << 2) | (Math.round(green) << 1) | Math.round(red));\n\n\t\t\t\tif (value === 2) {\n\t\t\t\t\tresult += 60;\n\t\t\t\t}\n\n\t\t\t\treturn result;\n\t\t\t},\n\t\t\tenumerable: false\n\t\t},\n\t\trgbToAnsi: {\n\t\t\tvalue: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)),\n\t\t\tenumerable: false\n\t\t},\n\t\thexToAnsi: {\n\t\t\tvalue: hex => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)),\n\t\t\tenumerable: false\n\t\t}\n\t});\n\n\treturn styles;\n}\n\nconst ansiStyles = assembleStyles();\n\nexport default ansiStyles;\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, 
key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(3109);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/dist/licenses.txt b/dist/licenses.txt index c0b211fbb..fdacc979a 100644 --- a/dist/licenses.txt +++ b/dist/licenses.txt @@ -684,23 +684,6 @@ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -yaml -ISC -Copyright Eemeli Aro - -Permission to use, copy, modify, and/or distribute this software for any purpose -with or without fee is hereby granted, provided that the above copyright notice -and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF -THIS SOFTWARE. - - zod MIT MIT License diff --git a/src/config.ts b/src/config.ts index ae32d13eb..38cd8152e 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,29 +1,27 @@ -import * as fs from 'fs' -import YAML from 'yaml' -import {ConfigurationOptions, ConfigurationOptionsSchema} from './schemas' -import path from 'path' +import * as core from '@actions/core' +import * as z from 'zod' +import {ConfigurationOptions, SEVERITIES} from './schemas' -export const CONFIG_FILEPATH = './.github/dependency-review.yml' - -export function readConfigFile( - filePath: string = CONFIG_FILEPATH -): ConfigurationOptions { - // By default we want to fail on all severities and allow all licenses. - const defaultOptions: ConfigurationOptions = { - fail_on_severity: 'low' - } +function getOptionalInput(name: string): string | undefined { + const value = core.getInput(name) + return value.length > 0 ? 
diff --git a/src/licenses.ts b/src/licenses.ts
index 8f5afb370..2d2b53500 100644
--- a/src/licenses.ts
+++ b/src/licenses.ts
@@ -10,7 +10,7 @@ import {Change, ChangeSchema} from './schemas'
  * we will ignore the deny list.
  * @param {Change[]} changes The list of changes to filter.
  * @param { { allow?: string[], deny?: string[]}} licenses An object with `allow`/`deny` keys, each containing a list of licenses.
- * @returns {Array<Change>} The list of denied changes.
+ * @returns {[Array<Change>, Array<Change>]} The list of denied changes and the list of changes with unknown licenses.
  */
 export function getDeniedLicenseChanges(
   changes: Array<Change>,
@@ -18,15 +18,17 @@ export function getDeniedLicenseChanges(
     allow?: Array<string>
     deny?: Array<string>
   }
-): Array<Change> {
+): [Array<Change>, Array<Change>] {
   let {allow, deny} = licenses
   let disallowed: Change[] = []
+  let unknown: Change[] = []
 
   for (const change of changes) {
     let license = change.license
     // TODO: be loud about unknown licenses
     if (license === null) {
+      unknown.push(change)
      continue
    }
 
     if (allow !== undefined) {
@@ -40,5 +42,5 @@ export function getDeniedLicenseChanges(
     }
   }
 
-  return disallowed
+  return [disallowed, unknown]
 }
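For context, a short usage sketch, not part of the change above, showing how a caller consumes the tuple now returned by `getDeniedLicenseChanges`. The import paths assume the snippet lives alongside the sources in `src/`, and the empty `changes` array is a stand-in for the real list the action builds from the dependency-diff API response.

```typescript
import {getDeniedLicenseChanges} from './licenses'
import {Change} from './schemas'

// Placeholder: in the action this comes from the dependency review API.
const changes: Change[] = []

// Destructure the tuple: denied changes first, unknown-license changes second.
const [licenseErrors, unknownLicenses] = getDeniedLicenseChanges(changes, {
  allow: ['MIT', 'BSD-3-Clause']
})

// Denied changes fail the run; changes with no detectable license are only reported.
console.log(licenseErrors.length, unknownLicenses.length)
```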
diff --git a/src/main.ts b/src/main.ts
index 9bca8f4f4..972092f77 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -4,7 +4,7 @@ import * as github from '@actions/github'
 import styles from 'ansi-styles'
 import {RequestError} from '@octokit/request-error'
 import {Change, PullRequestSchema, Severity} from './schemas'
-import {readConfigFile} from '../src/config'
+import {readConfig} from '../src/config'
 import {filterChangesBySeverity} from '../src/filter'
 import {getDeniedLicenseChanges} from './licenses'
 
@@ -27,7 +27,7 @@ async function run(): Promise<void> {
       headRef: pull_request.head.sha
     })
 
-    let config = readConfigFile()
+    let config = readConfig()
     let minSeverity = config.fail_on_severity
     let failed = false
 
@@ -36,14 +36,6 @@ async function run(): Promise<void> {
       deny: config.deny_licenses
     }
 
-    let licenseErrors = getDeniedLicenseChanges(changes, licenses)
-
-    if (licenseErrors.length > 0) {
-      printLicensesError(licenseErrors, licenses)
-      core.setFailed('Dependency review detected prohibited licenses.')
-      return
-    }
-
     let filteredChanges = filterChangesBySeverity(
       minSeverity as Severity,
       changes
@@ -60,11 +52,22 @@ async function run(): Promise<void> {
       }
     }
 
+    let [licenseErrors, unknownLicenses] = getDeniedLicenseChanges(
+      changes,
+      licenses
+    )
+
+    if (licenseErrors.length > 0) {
+      printLicensesError(licenseErrors, licenses)
+      printNullLicenses(unknownLicenses)
+      core.setFailed('Dependency review detected incompatible licenses.')
+    }
+
     if (failed) {
-      throw new Error('Dependency review detected vulnerable packages.')
+      core.setFailed('Dependency review detected vulnerable packages.')
     } else {
       core.info(
-        `Dependency review did not detect any vulnerable packages with severity level "${minSeverity}" or above.`
+        `Dependency review did not detect any vulnerable packages with severity level "${minSeverity}" or higher.`
       )
     }
   } catch (error) {
@@ -126,20 +129,19 @@ function printLicensesError(
   let {allow = [], deny = []} = licenses
 
-  core.info('Dependency review detected prohibited licenses.')
-
-  if (allow.length > 0) {
-    core.info('\nAllowed licenses: ' + allow.join(', ') + '\n')
-  }
-
-  if (deny.length > 0) {
-    core.info('\nDenied licenses: ' + deny.join(', ') + '\n')
+  core.info('\nThe following dependencies have incompatible licenses:\n')
+  for (const change of changes) {
+    core.info(
+      `${styles.bold.open}${change.manifest} » ${change.name}@${change.version}${styles.bold.close} – License: ${styles.color.red.open}${change.license}${styles.color.red.close}`
+    )
   }
+}
 
-  core.info('The following dependencies have incompatible licenses:\n')
+function printNullLicenses(changes: Array<Change>): void {
+  core.info('\nWe could not detect a license for the following dependencies:\n')
   for (const change of changes) {
     core.info(
-      `${styles.bold.open}${change.manifest} » ${change.name}@${change.version}${styles.bold.close} – License: ${styles.color.red.open}${change.license}${styles.color.red.close}`
+      `${styles.bold.open}${change.manifest} » ${change.name}@${change.version}${styles.bold.close}`
     )
   }
 }
 
diff --git a/src/schemas.ts b/src/schemas.ts
index 5fde60ecf..5667eb08b 100644
--- a/src/schemas.ts
+++ b/src/schemas.ts
@@ -39,7 +39,7 @@ export const ConfigurationOptionsSchema = z
   .partial()
   .refine(
     obj => !(obj.allow_licenses && obj.deny_licenses),
-    "Can't specify both allow_licenses and deny_licenses"
+    "Your workflow file has both an allow_licenses list and a deny_licenses list, but you can only set one or the other."
   )
 
 export const ChangesSchema = z.array(ChangeSchema)
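A rough sketch, not part of the diff, of how a `.refine()` message like the one above surfaces through zod's `safeParse`. `ExampleSchema` is a made-up, simplified stand-in for `ConfigurationOptionsSchema`, whose full shape is not shown here.

```typescript
import * as z from 'zod'

// Simplified stand-in for ConfigurationOptionsSchema: only the two license lists,
// plus the refine() that rejects configurations setting both of them.
const ExampleSchema = z
  .object({
    allow_licenses: z.array(z.string()).optional(),
    deny_licenses: z.array(z.string()).optional()
  })
  .refine(
    obj => !(obj.allow_licenses && obj.deny_licenses),
    'Your workflow file has both an allow_licenses list and a deny_licenses list, but you can only set one or the other.'
  )

// Providing both lists fails validation and carries the custom message.
const result = ExampleSchema.safeParse({
  allow_licenses: ['MIT'],
  deny_licenses: ['LGPL-2.0']
})

if (!result.success) {
  console.log(result.error.issues[0].message)
}
```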